Posted to commits@ambari.apache.org by ao...@apache.org on 2014/01/23 14:00:48 UTC

[1/2] git commit: AMBARI-4372. Write unit tests for HIVE install script on HDP1 and HDP2 (Arsen Babych via onishuk)

Updated Branches:
  refs/heads/trunk 99cabb956 -> 2e34a088b


AMBARI-4372. Write unit tests for HIVE install script on HDP1 and HDP2
(Arsen Babych via onishuk)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/151df0ba
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/151df0ba
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/151df0ba

Branch: refs/heads/trunk
Commit: 151df0baabd3ed3d5ffce86fa34ad345849c045d
Parents: 99cabb9
Author: Andrew Onischuk <ao...@hortonworks.com>
Authored: Thu Jan 23 04:35:06 2014 -0800
Committer: Andrew Onischuk <ao...@hortonworks.com>
Committed: Thu Jan 23 04:35:06 2014 -0800

----------------------------------------------------------------------
 .../stacks/1.3.3/HIVE/test_hcat_client.py       |  67 ++++++
 .../stacks/1.3.3/HIVE/test_hive_client.py       | 115 +++++++++
 .../stacks/1.3.3/HIVE/test_hive_metastore.py    | 230 ++++++++++++++++++
 .../stacks/1.3.3/HIVE/test_hive_server.py       | 231 +++++++++++++++++++
 .../stacks/1.3.3/HIVE/test_mysql_server.py      | 142 ++++++++++++
 .../stacks/2.1.1/HIVE/test_hcat_client.py       |  67 ++++++
 .../stacks/2.1.1/HIVE/test_hive_client.py       | 115 +++++++++
 .../stacks/2.1.1/HIVE/test_hive_metastore.py    | 229 ++++++++++++++++++
 .../stacks/2.1.1/HIVE/test_hive_server.py       | 231 +++++++++++++++++++
 .../stacks/2.1.1/HIVE/test_mysql_server.py      | 142 ++++++++++++
 .../src/test/python/stacks/utils/RMFTestCase.py |   6 +-
 11 files changed, 1572 insertions(+), 3 deletions(-)
----------------------------------------------------------------------

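All of the new test modules follow the same RMFTestCase pattern used throughout the diff below: executeScript() runs a stack script's command against a recorded JSON config, assertResourceCalled() checks each resource the script declares (in order), and assertNoMoreResources() fails if anything unexpected was declared. As a rough sketch of that pattern (the class name, script path, and asserted directory here are placeholders, not part of this commit):

    from stacks.utils.RMFTestCase import *

    class TestSomeComponent(RMFTestCase):

      def test_configure_default(self):
        # Run the component's "configure" command against a canned JSON config.
        self.executeScript("2.1.1/services/HIVE/package/scripts/some_component.py",
                           classname = "SomeComponent",
                           command = "configure",
                           config_file = "default.json"
        )
        # Assert each resource the script is expected to declare, in order.
        self.assertResourceCalled('Directory', '/etc/some/conf',
          owner = 'hive',
          group = 'hadoop',
        )
        # Fail if the script declared any resources beyond those asserted above.
        self.assertNoMoreResources()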

http://git-wip-us.apache.org/repos/asf/ambari/blob/151df0ba/ambari-server/src/test/python/stacks/1.3.3/HIVE/test_hcat_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/1.3.3/HIVE/test_hcat_client.py b/ambari-server/src/test/python/stacks/1.3.3/HIVE/test_hcat_client.py
new file mode 100644
index 0000000..5a935aa
--- /dev/null
+++ b/ambari-server/src/test/python/stacks/1.3.3/HIVE/test_hcat_client.py
@@ -0,0 +1,67 @@
+#!/usr/bin/env python
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+from mock.mock import MagicMock, call, patch
+from stacks.utils.RMFTestCase import *
+
+class TestHcatClient(RMFTestCase):
+
+  def test_configure_default(self):
+    self.executeScript("2.1.1/services/HIVE/package/scripts/hcat_client.py",
+                       classname = "HCatClient",
+                       command = "configure",
+                       config_file="default.json"
+    )
+
+    self.assertResourceCalled('Directory', '/etc/hcatalog/conf',
+      owner = 'hcat',
+      group = 'hadoop',
+    )
+    self.assertResourceCalled('Directory', '/var/run/webhcat',
+      owner = 'hcat',
+      recursive = True,
+    )
+    self.assertResourceCalled('TemplateConfig', '/etc/hcatalog/conf/hcat-env.sh',
+      owner = 'hcat',
+      group = 'hadoop',
+    )
+    self.assertNoMoreResources()
+
+
+
+  def test_configure_secured(self):
+    self.executeScript("2.1.1/services/HIVE/package/scripts/hcat_client.py",
+                         classname = "HCatClient",
+                         command = "configure",
+                         config_file="secured.json"
+    )
+
+    self.assertResourceCalled('Directory', '/etc/hcatalog/conf',
+      owner = 'hcat',
+      group = 'hadoop',
+    )
+    self.assertResourceCalled('Directory', '/var/run/webhcat',
+      owner = 'hcat',
+      recursive = True,
+    )
+    self.assertResourceCalled('TemplateConfig', '/etc/hcatalog/conf/hcat-env.sh',
+      owner = 'hcat',
+      group = 'hadoop',
+    )
+    self.assertNoMoreResources()
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/151df0ba/ambari-server/src/test/python/stacks/1.3.3/HIVE/test_hive_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/1.3.3/HIVE/test_hive_client.py b/ambari-server/src/test/python/stacks/1.3.3/HIVE/test_hive_client.py
new file mode 100644
index 0000000..bcd5d75
--- /dev/null
+++ b/ambari-server/src/test/python/stacks/1.3.3/HIVE/test_hive_client.py
@@ -0,0 +1,115 @@
+#!/usr/bin/env python
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+from mock.mock import MagicMock, call, patch
+from stacks.utils.RMFTestCase import *
+
+class TestHiveClient(RMFTestCase):
+
+  def test_configure_default(self):
+    self.executeScript("2.1.1/services/HIVE/package/scripts/hive_client.py",
+                       classname = "HiveClient",
+                       command = "configure",
+                       config_file="default.json"
+    )
+
+    self.assertResourceCalled('Directory', '/etc/hive/conf',
+      owner = 'hive',
+      group = 'hadoop',
+      recursive = True,
+    )
+    self.assertResourceCalled('XmlConfig', 'hive-site.xml',
+      owner = 'hive',
+      group = 'hadoop',
+      mode = 420,
+      conf_dir = '/etc/hive/conf',
+      configurations = self.getConfig()['configurations']['hive-site'],
+    )
+    self.assertResourceCalled('Execute', "/bin/sh -c 'cd /usr/lib/ambari-agent/ && curl -kf --retry 5 http://c6401.ambari.apache.org:8080/resources/DBConnectionVerification.jar -o DBConnectionVerification.jar'",
+      not_if = '[ -f DBConnectionVerification.jar]',
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf/hive-env.sh',
+      content = Template('hive-env.sh.j2', conf_dir="/etc/hive/conf"),
+      owner = 'hive',
+      group = 'hadoop',
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf/hive-default.xml.template',
+      owner = 'hive',
+      group = 'hadoop',
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf/hive-env.sh.template',
+      owner = 'hive',
+      group = 'hadoop',
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf/hive-exec-log4j.properties.template',
+      owner = 'hive',
+      group = 'hadoop',
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf/hive-log4j.properties.template',
+      owner = 'hive',
+      group = 'hadoop',
+    )
+    self.assertNoMoreResources()
+
+
+
+  def test_configure_secured(self):
+    self.executeScript("2.1.1/services/HIVE/package/scripts/hive_client.py",
+                       classname = "HiveClient",
+                       command = "configure",
+                       config_file="secured.json"
+    )
+
+    self.assertResourceCalled('Directory', '/etc/hive/conf',
+      owner = 'hive',
+      group = 'hadoop',
+      recursive = True,
+    )
+    self.assertResourceCalled('XmlConfig', 'hive-site.xml',
+      owner = 'hive',
+      group = 'hadoop',
+      mode = 420,
+      conf_dir = '/etc/hive/conf',
+      configurations = self.getConfig()['configurations']['hive-site'],
+    )
+    self.assertResourceCalled('Execute', "/bin/sh -c 'cd /usr/lib/ambari-agent/ && curl -kf --retry 5 http://c6401.ambari.apache.org:8080/resources/DBConnectionVerification.jar -o DBConnectionVerification.jar'",
+      not_if = '[ -f DBConnectionVerification.jar]',
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf/hive-env.sh',
+      content = Template('hive-env.sh.j2', conf_dir="/etc/hive/conf"),
+      owner = 'hive',
+      group = 'hadoop',
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf/hive-default.xml.template',
+      owner = 'hive',
+      group = 'hadoop',
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf/hive-env.sh.template',
+      owner = 'hive',
+      group = 'hadoop',
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf/hive-exec-log4j.properties.template',
+      owner = 'hive',
+      group = 'hadoop',
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf/hive-log4j.properties.template',
+      owner = 'hive',
+      group = 'hadoop',
+    )
+    self.assertNoMoreResources()
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/151df0ba/ambari-server/src/test/python/stacks/1.3.3/HIVE/test_hive_metastore.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/1.3.3/HIVE/test_hive_metastore.py b/ambari-server/src/test/python/stacks/1.3.3/HIVE/test_hive_metastore.py
new file mode 100644
index 0000000..c5088dd
--- /dev/null
+++ b/ambari-server/src/test/python/stacks/1.3.3/HIVE/test_hive_metastore.py
@@ -0,0 +1,230 @@
+#!/usr/bin/env python
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+from mock.mock import MagicMock, call, patch
+from stacks.utils.RMFTestCase import *
+
+class TestHiveMetastore(RMFTestCase):
+
+  def test_configure_default(self):
+    self.executeScript("2.1.1/services/HIVE/package/scripts/hive_metastore.py",
+                       classname = "HiveMetastore",
+                       command = "configure",
+                       config_file="default.json"
+    )
+    self.assert_configure_default()
+    self.assertNoMoreResources()
+
+  def test_start_default(self):
+    self.executeScript("2.1.1/services/HIVE/package/scripts/hive_metastore.py",
+                       classname = "HiveMetastore",
+                       command = "start",
+                       config_file="default.json"
+    )
+
+    self.assert_configure_default()
+    self.assertResourceCalled('Execute', 'env HADOOP_HOME=/usr JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /tmp/start_metastore_script /var/log/hive/hive.out /var/log/hive/hive.log /var/run/hive/hive.pid /etc/hive/conf.server',
+                              not_if = 'ls /var/run/hive/hive.pid >/dev/null 2>&1 && ps `cat /var/run/hive/hive.pid` >/dev/null 2>&1',
+                              user = 'hive'
+    )
+
+    self.assertResourceCalled('Execute', '/usr/jdk64/jdk1.7.0_45/bin/java -cp /usr/lib/ambari-agent/DBConnectionVerification.jar:/usr/share/java/mysql-connector-java.jar org.apache.ambari.server.DBConnectionVerification jdbc:mysql://c6402.ambari.apache.org/hive?createDatabaseIfNotExist=true hive asd com.mysql.jdbc.Driver',
+                              path=['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin']
+    )
+
+    self.assertNoMoreResources()
+
+  def test_stop_default(self):
+    self.executeScript("2.1.1/services/HIVE/package/scripts/hive_metastore.py",
+                       classname = "HiveMetastore",
+                       command = "stop",
+                       config_file="default.json"
+    )
+
+    self.assertResourceCalled('Execute', 'kill `cat /var/run/hive/hive.pid` >/dev/null 2>&1 && rm -f /var/run/hive/hive.pid')
+    self.assertNoMoreResources()
+
+  def test_configure_secured(self):
+    self.executeScript("2.1.1/services/HIVE/package/scripts/hive_metastore.py",
+                       classname = "HiveMetastore",
+                       command = "configure",
+                       config_file="secured.json"
+    )
+    self.assert_configure_default()
+    self.assertNoMoreResources()
+
+  def test_start_secured(self):
+    self.executeScript("2.1.1/services/HIVE/package/scripts/hive_metastore.py",
+                       classname = "HiveMetastore",
+                       command = "start",
+                       config_file="secured.json"
+    )
+
+    self.assert_configure_secured()
+    self.assertResourceCalled('Execute', 'env HADOOP_HOME=/usr JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /tmp/start_metastore_script /var/log/hive/hive.out /var/log/hive/hive.log /var/run/hive/hive.pid /etc/hive/conf.server',
+                              not_if = 'ls /var/run/hive/hive.pid >/dev/null 2>&1 && ps `cat /var/run/hive/hive.pid` >/dev/null 2>&1',
+                              user = 'hive'
+    )
+
+    self.assertResourceCalled('Execute', '/usr/jdk64/jdk1.7.0_45/bin/java -cp /usr/lib/ambari-agent/DBConnectionVerification.jar:/usr/share/java/mysql-connector-java.jar org.apache.ambari.server.DBConnectionVerification jdbc:mysql://c6402.ambari.apache.org/hive?createDatabaseIfNotExist=true hive asd com.mysql.jdbc.Driver',
+                              path=['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin']
+    )
+
+    self.assertNoMoreResources()
+
+  def test_stop_secured(self):
+    self.executeScript("2.1.1/services/HIVE/package/scripts/hive_metastore.py",
+                       classname = "HiveMetastore",
+                       command = "stop",
+                       config_file="secured.json"
+    )
+
+    self.assertResourceCalled('Execute', 'kill `cat /var/run/hive/hive.pid` >/dev/null 2>&1 && rm -f /var/run/hive/hive.pid')
+    self.assertNoMoreResources()
+
+  def assert_configure_default(self):
+    self.assertResourceCalled('Execute', 'hive mkdir -p /tmp/HDP-artifacts/ ; cp /usr/share/java/mysql-connector-java.jar /usr/lib/hive/lib//mysql-connector-java.jar',
+      creates = '/usr/lib/hive/lib//mysql-connector-java.jar',
+      path = ['/bin', 'usr/bin/'],
+      not_if = 'test -f /usr/lib/hive/lib//mysql-connector-java.jar',
+    )
+    self.assertResourceCalled('Directory', '/etc/hive/conf.server',
+      owner = 'hive',
+      group = 'hadoop',
+      recursive = True,
+    )
+    self.assertResourceCalled('XmlConfig', 'hive-site.xml',
+      owner = 'hive',
+      group = 'hadoop',
+      mode = 384,
+      conf_dir = '/etc/hive/conf.server',
+      configurations = self.getConfig()['configurations']['hive-site'],
+    )
+    self.assertResourceCalled('Execute', "/bin/sh -c 'cd /usr/lib/ambari-agent/ && curl -kf --retry 5 http://c6401.ambari.apache.org:8080/resources/DBConnectionVerification.jar -o DBConnectionVerification.jar'",
+      not_if = '[ -f DBConnectionVerification.jar]',
+    )
+    self.assertResourceCalled('File', '/tmp/start_metastore_script',
+      content = StaticFile('startMetastore.sh'),
+      mode = 493,
+    )
+    self.assertResourceCalled('Directory', '/var/run/hive',
+      owner = 'hive',
+      group = 'hadoop',
+      mode = 493,
+      recursive = True,
+    )
+    self.assertResourceCalled('Directory', '/var/log/hive',
+      owner = 'hive',
+      group = 'hadoop',
+      mode = 493,
+      recursive = True,
+    )
+    self.assertResourceCalled('Directory', '/var/lib/hive',
+      owner = 'hive',
+      group = 'hadoop',
+      mode = 493,
+      recursive = True,
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf.server/hive-env.sh',
+      content = Template('hive-env.sh.j2', conf_dir="/etc/hive/conf.server"),
+      owner = 'hive',
+      group = 'hadoop',
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf/hive-default.xml.template',
+      owner = 'hive',
+      group = 'hadoop',
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf/hive-env.sh.template',
+      owner = 'hive',
+      group = 'hadoop',
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf/hive-exec-log4j.properties.template',
+      owner = 'hive',
+      group = 'hadoop',
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf/hive-log4j.properties.template',
+      owner = 'hive',
+      group = 'hadoop',
+    )
+
+  def assert_configure_secured(self):
+    self.assertResourceCalled('Execute', 'hive mkdir -p /tmp/HDP-artifacts/ ; cp /usr/share/java/mysql-connector-java.jar /usr/lib/hive/lib//mysql-connector-java.jar',
+      creates = '/usr/lib/hive/lib//mysql-connector-java.jar',
+      path = ['/bin', 'usr/bin/'],
+      not_if = 'test -f /usr/lib/hive/lib//mysql-connector-java.jar',
+    )
+    self.assertResourceCalled('Directory', '/etc/hive/conf.server',
+      owner = 'hive',
+      group = 'hadoop',
+      recursive = True,
+    )
+    self.assertResourceCalled('XmlConfig', 'hive-site.xml',
+      owner = 'hive',
+      group = 'hadoop',
+      mode = 384,
+      conf_dir = '/etc/hive/conf.server',
+      configurations = self.getConfig()['configurations']['hive-site'],
+    )
+    self.assertResourceCalled('Execute', "/bin/sh -c 'cd /usr/lib/ambari-agent/ && curl -kf --retry 5 http://c6401.ambari.apache.org:8080/resources/DBConnectionVerification.jar -o DBConnectionVerification.jar'",
+      not_if = '[ -f DBConnectionVerification.jar]',
+    )
+    self.assertResourceCalled('File', '/tmp/start_metastore_script',
+      content = StaticFile('startMetastore.sh'),
+      mode = 493,
+    )
+    self.assertResourceCalled('Directory', '/var/run/hive',
+      owner = 'hive',
+      group = 'hadoop',
+      mode = 493,
+      recursive = True,
+    )
+    self.assertResourceCalled('Directory', '/var/log/hive',
+      owner = 'hive',
+      group = 'hadoop',
+      mode = 493,
+      recursive = True,
+    )
+    self.assertResourceCalled('Directory', '/var/lib/hive',
+      owner = 'hive',
+      group = 'hadoop',
+      mode = 493,
+      recursive = True,
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf.server/hive-env.sh',
+      content = Template('hive-env.sh.j2', conf_dir="/etc/hive/conf.server"),
+      owner = 'hive',
+      group = 'hadoop',
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf/hive-default.xml.template',
+      owner = 'hive',
+      group = 'hadoop',
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf/hive-env.sh.template',
+      owner = 'hive',
+      group = 'hadoop',
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf/hive-exec-log4j.properties.template',
+      owner = 'hive',
+      group = 'hadoop',
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf/hive-log4j.properties.template',
+      owner = 'hive',
+      group = 'hadoop',
+    )
+

http://git-wip-us.apache.org/repos/asf/ambari/blob/151df0ba/ambari-server/src/test/python/stacks/1.3.3/HIVE/test_hive_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/1.3.3/HIVE/test_hive_server.py b/ambari-server/src/test/python/stacks/1.3.3/HIVE/test_hive_server.py
new file mode 100644
index 0000000..e669411
--- /dev/null
+++ b/ambari-server/src/test/python/stacks/1.3.3/HIVE/test_hive_server.py
@@ -0,0 +1,231 @@
+#!/usr/bin/env python
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+from mock.mock import MagicMock, call, patch
+from stacks.utils.RMFTestCase import *
+
+class TestHiveServer(RMFTestCase):
+
+  def test_configure_default(self):
+    self.executeScript("2.1.1/services/HIVE/package/scripts/hive_server.py",
+                       classname = "HiveServer",
+                       command = "configure",
+                       config_file="default.json"
+    )
+    self.assert_configure_default()
+    self.assertNoMoreResources()
+  
+  def test_start_default(self):
+    self.executeScript("2.1.1/services/HIVE/package/scripts/hive_server.py",
+                         classname = "HiveServer",
+                         command = "start",
+                         config_file="default.json"
+    )
+
+    self.assert_configure_default()
+    self.assertResourceCalled('Execute', 'env JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /tmp/start_hiveserver2_script /var/log/hive/hive-server2.out /var/log/hive/hive-server2.log /var/run/hive/hive-server.pid /etc/hive/conf.server',
+                              not_if = 'ls /var/run/hive/hive-server.pid >/dev/null 2>&1 && ps `cat /var/run/hive/hive-server.pid` >/dev/null 2>&1',
+                              user = 'hive'
+    )
+
+    self.assertResourceCalled('Execute', '/usr/jdk64/jdk1.7.0_45/bin/java -cp /usr/lib/ambari-agent/DBConnectionVerification.jar:/usr/share/java/mysql-connector-java.jar org.apache.ambari.server.DBConnectionVerification jdbc:mysql://c6402.ambari.apache.org/hive?createDatabaseIfNotExist=true hive asd com.mysql.jdbc.Driver',
+                              path=['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin']
+    )
+
+    self.assertNoMoreResources()
+
+  def test_stop_default(self):
+    self.executeScript("2.1.1/services/HIVE/package/scripts/hive_server.py",
+                       classname = "HiveServer",
+                       command = "stop",
+                       config_file="default.json"
+    )
+
+    self.assertResourceCalled('Execute', 'kill `cat /var/run/hive/hive-server.pid` >/dev/null 2>&1 && rm -f /var/run/hive/hive-server.pid')
+    self.assertNoMoreResources()
+
+    
+  def test_configure_secured(self):
+    self.executeScript("2.1.1/services/HIVE/package/scripts/hive_server.py",
+                       classname = "HiveServer",
+                       command = "configure",
+                       config_file="secured.json"
+    )
+    self.assert_configure_secured()
+    self.assertNoMoreResources()
+
+  def test_start_secured(self):
+    self.executeScript("2.1.1/services/HIVE/package/scripts/hive_server.py",
+                       classname = "HiveServer",
+                       command = "start",
+                       config_file="secured.json"
+    )
+
+    self.assert_configure_secured()
+    self.assertResourceCalled('Execute', 'env JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /tmp/start_hiveserver2_script /var/log/hive/hive-server2.out /var/log/hive/hive-server2.log /var/run/hive/hive-server.pid /etc/hive/conf.server',
+                              not_if = 'ls /var/run/hive/hive-server.pid >/dev/null 2>&1 && ps `cat /var/run/hive/hive-server.pid` >/dev/null 2>&1',
+                              user = 'hive'
+    )
+
+    self.assertResourceCalled('Execute', '/usr/jdk64/jdk1.7.0_45/bin/java -cp /usr/lib/ambari-agent/DBConnectionVerification.jar:/usr/share/java/mysql-connector-java.jar org.apache.ambari.server.DBConnectionVerification jdbc:mysql://c6402.ambari.apache.org/hive?createDatabaseIfNotExist=true hive asd com.mysql.jdbc.Driver',
+                              path=['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin']
+    )
+
+    self.assertNoMoreResources()
+
+  def test_stop_secured(self):
+    self.executeScript("2.1.1/services/HIVE/package/scripts/hive_server.py",
+                       classname = "HiveServer",
+                       command = "stop",
+                       config_file="secured.json"
+    )
+
+    self.assertResourceCalled('Execute', 'kill `cat /var/run/hive/hive-server.pid` >/dev/null 2>&1 && rm -f /var/run/hive/hive-server.pid')
+    self.assertNoMoreResources()
+
+  def assert_configure_default(self):
+    self.assertResourceCalled('Execute', 'hive mkdir -p /tmp/HDP-artifacts/ ; cp /usr/share/java/mysql-connector-java.jar /usr/lib/hive/lib//mysql-connector-java.jar',
+      creates = '/usr/lib/hive/lib//mysql-connector-java.jar',
+      path = ['/bin', 'usr/bin/'],
+      not_if = 'test -f /usr/lib/hive/lib//mysql-connector-java.jar',
+    )
+    self.assertResourceCalled('Directory', '/etc/hive/conf.server',
+      owner = 'hive',
+      group = 'hadoop',
+      recursive = True,
+    )
+    self.assertResourceCalled('XmlConfig', 'hive-site.xml',
+      owner = 'hive',
+      group = 'hadoop',
+      mode = 384,
+      conf_dir = '/etc/hive/conf.server',
+      configurations = self.getConfig()['configurations']['hive-site'],
+    )
+    self.assertResourceCalled('Execute', "/bin/sh -c 'cd /usr/lib/ambari-agent/ && curl -kf --retry 5 http://c6401.ambari.apache.org:8080/resources/DBConnectionVerification.jar -o DBConnectionVerification.jar'",
+      not_if = '[ -f DBConnectionVerification.jar]',
+    )
+    self.assertResourceCalled('File', '/tmp/start_hiveserver2_script',
+      content = StaticFile('startHiveserver2.sh'),
+      mode = 493,
+    )
+    self.assertResourceCalled('Directory', '/var/run/hive',
+      owner = 'hive',
+      group = 'hadoop',
+      mode = 493,
+      recursive = True,
+    )
+    self.assertResourceCalled('Directory', '/var/log/hive',
+      owner = 'hive',
+      group = 'hadoop',
+      mode = 493,
+      recursive = True,
+    )
+    self.assertResourceCalled('Directory', '/var/lib/hive',
+      owner = 'hive',
+      group = 'hadoop',
+      mode = 493,
+      recursive = True,
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf.server/hive-env.sh',
+      content = Template('hive-env.sh.j2', conf_dir="/etc/hive/conf.server"),
+      owner = 'hive',
+      group = 'hadoop',
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf/hive-default.xml.template',
+      owner = 'hive',
+      group = 'hadoop',
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf/hive-env.sh.template',
+      owner = 'hive',
+      group = 'hadoop',
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf/hive-exec-log4j.properties.template',
+      owner = 'hive',
+      group = 'hadoop',
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf/hive-log4j.properties.template',
+      owner = 'hive',
+      group = 'hadoop',
+    )
+
+  def assert_configure_secured(self):
+    self.assertResourceCalled('Execute', 'hive mkdir -p /tmp/HDP-artifacts/ ; cp /usr/share/java/mysql-connector-java.jar /usr/lib/hive/lib//mysql-connector-java.jar',
+      creates = '/usr/lib/hive/lib//mysql-connector-java.jar',
+      path = ['/bin', 'usr/bin/'],
+      not_if = 'test -f /usr/lib/hive/lib//mysql-connector-java.jar',
+    )
+    self.assertResourceCalled('Directory', '/etc/hive/conf.server',
+      owner = 'hive',
+      group = 'hadoop',
+      recursive = True,
+    )
+    self.assertResourceCalled('XmlConfig', 'hive-site.xml',
+      owner = 'hive',
+      group = 'hadoop',
+      mode = 384,
+      conf_dir = '/etc/hive/conf.server',
+      configurations = self.getConfig()['configurations']['hive-site'],
+    )
+    self.assertResourceCalled('Execute', "/bin/sh -c 'cd /usr/lib/ambari-agent/ && curl -kf --retry 5 http://c6401.ambari.apache.org:8080/resources/DBConnectionVerification.jar -o DBConnectionVerification.jar'",
+      not_if = '[ -f DBConnectionVerification.jar]',
+    )
+    self.assertResourceCalled('File', '/tmp/start_hiveserver2_script',
+      content = StaticFile('startHiveserver2.sh'),
+      mode = 493,
+    )
+    self.assertResourceCalled('Directory', '/var/run/hive',
+      owner = 'hive',
+      group = 'hadoop',
+      mode = 493,
+      recursive = True,
+    )
+    self.assertResourceCalled('Directory', '/var/log/hive',
+      owner = 'hive',
+      group = 'hadoop',
+      mode = 493,
+      recursive = True,
+    )
+    self.assertResourceCalled('Directory', '/var/lib/hive',
+      owner = 'hive',
+      group = 'hadoop',
+      mode = 493,
+      recursive = True,
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf.server/hive-env.sh',
+      content = Template('hive-env.sh.j2', conf_dir="/etc/hive/conf.server"),
+      owner = 'hive',
+      group = 'hadoop',
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf/hive-default.xml.template',
+      owner = 'hive',
+      group = 'hadoop',
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf/hive-env.sh.template',
+      owner = 'hive',
+      group = 'hadoop',
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf/hive-exec-log4j.properties.template',
+      owner = 'hive',
+      group = 'hadoop',
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf/hive-log4j.properties.template',
+      owner = 'hive',
+      group = 'hadoop',
+    )
+

http://git-wip-us.apache.org/repos/asf/ambari/blob/151df0ba/ambari-server/src/test/python/stacks/1.3.3/HIVE/test_mysql_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/1.3.3/HIVE/test_mysql_server.py b/ambari-server/src/test/python/stacks/1.3.3/HIVE/test_mysql_server.py
new file mode 100644
index 0000000..2b4a531
--- /dev/null
+++ b/ambari-server/src/test/python/stacks/1.3.3/HIVE/test_mysql_server.py
@@ -0,0 +1,142 @@
+#!/usr/bin/env python
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+from mock.mock import MagicMock, call, patch
+from stacks.utils.RMFTestCase import *
+
+class TestMySqlServer(RMFTestCase):
+
+  def test_configure_default(self):
+    self.executeScript("2.1.1/services/HIVE/package/scripts/mysql_server.py",
+                       classname = "MysqlServer",
+                       command = "configure",
+                       config_file="default.json"
+    )
+    self.assert_configure_default()
+    self.assertNoMoreResources()
+
+  def test_start_default(self):
+    self.executeScript("2.1.1/services/HIVE/package/scripts/mysql_server.py",
+                       classname = "MysqlServer",
+                       command = "start",
+                       config_file="default.json"
+    )
+
+    self.assertResourceCalled('Execute', 'service mysqld start',
+                       logoutput = True,
+                       path = ['/usr/local/bin/:/bin/:/sbin/'],
+                       tries = 1,
+    )
+    self.assertNoMoreResources()
+
+  def test_stop_default(self):
+    self.executeScript("2.1.1/services/HIVE/package/scripts/mysql_server.py",
+                       classname = "MysqlServer",
+                       command = "stop",
+                       config_file="default.json"
+    )
+
+    self.assertResourceCalled('Execute', 'service mysqld stop',
+                              logoutput = True,
+                              path = ['/usr/local/bin/:/bin/:/sbin/'],
+                              tries = 1,
+    )
+    self.assertNoMoreResources()
+
+
+  def test_configure_secured(self):
+    self.executeScript("2.1.1/services/HIVE/package/scripts/mysql_server.py",
+                       classname = "MysqlServer",
+                       command = "configure",
+                       config_file="secured.json"
+    )
+    self.assert_configure_secured()
+    self.assertNoMoreResources()
+
+  def test_start_secured(self):
+    self.executeScript("2.1.1/services/HIVE/package/scripts/mysql_server.py",
+                       classname = "MysqlServer",
+                       command = "start",
+                       config_file="secured.json"
+    )
+
+    self.assertResourceCalled('Execute', 'service mysqld start',
+                              logoutput = True,
+                              path = ['/usr/local/bin/:/bin/:/sbin/'],
+                              tries = 1,
+                              )
+    self.assertNoMoreResources()
+
+  def test_stop_secured(self):
+    self.executeScript("2.1.1/services/HIVE/package/scripts/mysql_server.py",
+                       classname = "MysqlServer",
+                       command = "stop",
+                       config_file="secured.json"
+    )
+
+    self.assertResourceCalled('Execute', 'service mysqld stop',
+                              logoutput = True,
+                              path = ['/usr/local/bin/:/bin/:/sbin/'],
+                              tries = 1,
+                              )
+    self.assertNoMoreResources()
+
+  def assert_configure_default(self):
+    self.assertResourceCalled('Execute', 'service mysqld start',
+      logoutput = True,
+      path = ['/usr/local/bin/:/bin/:/sbin/'],
+      tries = 1,
+    )
+    self.assertResourceCalled('File', '/tmp/addMysqlUser.sh',
+      content = StaticFile('addMysqlUser.sh'),
+      mode = 493,
+    )
+    self.assertResourceCalled('Execute', ('bash', '-x', '/tmp/addMysqlUser.sh', 'mysqld', u'hive', 'asd', u'c6402.ambari.apache.org'),
+      logoutput = True,
+      path = ['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'],
+      tries = 3,
+      try_sleep = 5,
+    )
+    self.assertResourceCalled('Execute', 'service mysqld stop',
+      logoutput = True,
+      path = ['/usr/local/bin/:/bin/:/sbin/'],
+      tries = 1,
+    )
+
+  def assert_configure_secured(self):
+    self.assertResourceCalled('Execute', 'service mysqld start',
+      logoutput = True,
+      path = ['/usr/local/bin/:/bin/:/sbin/'],
+      tries = 1,
+    )
+    self.assertResourceCalled('File', '/tmp/addMysqlUser.sh',
+      content = StaticFile('addMysqlUser.sh'),
+      mode = 493,
+    )
+    self.assertResourceCalled('Execute', ('bash', '-x', '/tmp/addMysqlUser.sh', 'mysqld', u'hive', 'asd', u'c6402.ambari.apache.org'),
+      logoutput = True,
+      path = ['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'],
+      tries = 3,
+      try_sleep = 5,
+    )
+    self.assertResourceCalled('Execute', 'service mysqld stop',
+      logoutput = True,
+      path = ['/usr/local/bin/:/bin/:/sbin/'],
+      tries = 1,
+    )

http://git-wip-us.apache.org/repos/asf/ambari/blob/151df0ba/ambari-server/src/test/python/stacks/2.1.1/HIVE/test_hcat_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.1.1/HIVE/test_hcat_client.py b/ambari-server/src/test/python/stacks/2.1.1/HIVE/test_hcat_client.py
new file mode 100644
index 0000000..5a935aa
--- /dev/null
+++ b/ambari-server/src/test/python/stacks/2.1.1/HIVE/test_hcat_client.py
@@ -0,0 +1,67 @@
+#!/usr/bin/env python
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+from mock.mock import MagicMock, call, patch
+from stacks.utils.RMFTestCase import *
+
+class TestHcatClient(RMFTestCase):
+
+  def test_configure_default(self):
+    self.executeScript("2.1.1/services/HIVE/package/scripts/hcat_client.py",
+                       classname = "HCatClient",
+                       command = "configure",
+                       config_file="default.json"
+    )
+
+    self.assertResourceCalled('Directory', '/etc/hcatalog/conf',
+      owner = 'hcat',
+      group = 'hadoop',
+    )
+    self.assertResourceCalled('Directory', '/var/run/webhcat',
+      owner = 'hcat',
+      recursive = True,
+    )
+    self.assertResourceCalled('TemplateConfig', '/etc/hcatalog/conf/hcat-env.sh',
+      owner = 'hcat',
+      group = 'hadoop',
+    )
+    self.assertNoMoreResources()
+
+
+
+  def test_configure_secured(self):
+    self.executeScript("2.1.1/services/HIVE/package/scripts/hcat_client.py",
+                         classname = "HCatClient",
+                         command = "configure",
+                         config_file="secured.json"
+    )
+
+    self.assertResourceCalled('Directory', '/etc/hcatalog/conf',
+      owner = 'hcat',
+      group = 'hadoop',
+    )
+    self.assertResourceCalled('Directory', '/var/run/webhcat',
+      owner = 'hcat',
+      recursive = True,
+    )
+    self.assertResourceCalled('TemplateConfig', '/etc/hcatalog/conf/hcat-env.sh',
+      owner = 'hcat',
+      group = 'hadoop',
+    )
+    self.assertNoMoreResources()
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/151df0ba/ambari-server/src/test/python/stacks/2.1.1/HIVE/test_hive_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.1.1/HIVE/test_hive_client.py b/ambari-server/src/test/python/stacks/2.1.1/HIVE/test_hive_client.py
new file mode 100644
index 0000000..bcd5d75
--- /dev/null
+++ b/ambari-server/src/test/python/stacks/2.1.1/HIVE/test_hive_client.py
@@ -0,0 +1,115 @@
+#!/usr/bin/env python
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+from mock.mock import MagicMock, call, patch
+from stacks.utils.RMFTestCase import *
+
+class TestHiveClient(RMFTestCase):
+
+  def test_configure_default(self):
+    self.executeScript("2.1.1/services/HIVE/package/scripts/hive_client.py",
+                       classname = "HiveClient",
+                       command = "configure",
+                       config_file="default.json"
+    )
+
+    self.assertResourceCalled('Directory', '/etc/hive/conf',
+      owner = 'hive',
+      group = 'hadoop',
+      recursive = True,
+    )
+    self.assertResourceCalled('XmlConfig', 'hive-site.xml',
+      owner = 'hive',
+      group = 'hadoop',
+      mode = 420,
+      conf_dir = '/etc/hive/conf',
+      configurations = self.getConfig()['configurations']['hive-site'],
+    )
+    self.assertResourceCalled('Execute', "/bin/sh -c 'cd /usr/lib/ambari-agent/ && curl -kf --retry 5 http://c6401.ambari.apache.org:8080/resources/DBConnectionVerification.jar -o DBConnectionVerification.jar'",
+      not_if = '[ -f DBConnectionVerification.jar]',
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf/hive-env.sh',
+      content = Template('hive-env.sh.j2', conf_dir="/etc/hive/conf"),
+      owner = 'hive',
+      group = 'hadoop',
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf/hive-default.xml.template',
+      owner = 'hive',
+      group = 'hadoop',
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf/hive-env.sh.template',
+      owner = 'hive',
+      group = 'hadoop',
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf/hive-exec-log4j.properties.template',
+      owner = 'hive',
+      group = 'hadoop',
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf/hive-log4j.properties.template',
+      owner = 'hive',
+      group = 'hadoop',
+    )
+    self.assertNoMoreResources()
+
+
+
+  def test_configure_secured(self):
+    self.executeScript("2.1.1/services/HIVE/package/scripts/hive_client.py",
+                       classname = "HiveClient",
+                       command = "configure",
+                       config_file="secured.json"
+    )
+
+    self.assertResourceCalled('Directory', '/etc/hive/conf',
+      owner = 'hive',
+      group = 'hadoop',
+      recursive = True,
+    )
+    self.assertResourceCalled('XmlConfig', 'hive-site.xml',
+      owner = 'hive',
+      group = 'hadoop',
+      mode = 420,
+      conf_dir = '/etc/hive/conf',
+      configurations = self.getConfig()['configurations']['hive-site'],
+    )
+    self.assertResourceCalled('Execute', "/bin/sh -c 'cd /usr/lib/ambari-agent/ && curl -kf --retry 5 http://c6401.ambari.apache.org:8080/resources/DBConnectionVerification.jar -o DBConnectionVerification.jar'",
+      not_if = '[ -f DBConnectionVerification.jar]',
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf/hive-env.sh',
+      content = Template('hive-env.sh.j2', conf_dir="/etc/hive/conf"),
+      owner = 'hive',
+      group = 'hadoop',
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf/hive-default.xml.template',
+      owner = 'hive',
+      group = 'hadoop',
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf/hive-env.sh.template',
+      owner = 'hive',
+      group = 'hadoop',
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf/hive-exec-log4j.properties.template',
+      owner = 'hive',
+      group = 'hadoop',
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf/hive-log4j.properties.template',
+      owner = 'hive',
+      group = 'hadoop',
+    )
+    self.assertNoMoreResources()
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/151df0ba/ambari-server/src/test/python/stacks/2.1.1/HIVE/test_hive_metastore.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.1.1/HIVE/test_hive_metastore.py b/ambari-server/src/test/python/stacks/2.1.1/HIVE/test_hive_metastore.py
new file mode 100644
index 0000000..7ce7063
--- /dev/null
+++ b/ambari-server/src/test/python/stacks/2.1.1/HIVE/test_hive_metastore.py
@@ -0,0 +1,229 @@
+#!/usr/bin/env python
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+from mock.mock import MagicMock, call, patch
+from stacks.utils.RMFTestCase import *
+
+class TestHiveMetastore(RMFTestCase):
+
+  def test_configure_default(self):
+    self.executeScript("2.1.1/services/HIVE/package/scripts/hive_metastore.py",
+                       classname = "HiveMetastore",
+                       command = "configure",
+                       config_file="default.json"
+    )
+    self.assert_configure_default()
+
+  def test_start_default(self):
+    self.executeScript("2.1.1/services/HIVE/package/scripts/hive_metastore.py",
+                       classname = "HiveMetastore",
+                       command = "start",
+                       config_file="default.json"
+    )
+
+    self.assert_configure_default()
+    self.assertResourceCalled('Execute', 'env HADOOP_HOME=/usr JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /tmp/start_metastore_script /var/log/hive/hive.out /var/log/hive/hive.log /var/run/hive/hive.pid /etc/hive/conf.server',
+                              not_if = 'ls /var/run/hive/hive.pid >/dev/null 2>&1 && ps `cat /var/run/hive/hive.pid` >/dev/null 2>&1',
+                              user = 'hive'
+    )
+
+    self.assertResourceCalled('Execute', '/usr/jdk64/jdk1.7.0_45/bin/java -cp /usr/lib/ambari-agent/DBConnectionVerification.jar:/usr/share/java/mysql-connector-java.jar org.apache.ambari.server.DBConnectionVerification jdbc:mysql://c6402.ambari.apache.org/hive?createDatabaseIfNotExist=true hive asd com.mysql.jdbc.Driver',
+                              path=['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin']
+    )
+
+    self.assertNoMoreResources()
+
+  def test_stop_default(self):
+    self.executeScript("2.1.1/services/HIVE/package/scripts/hive_metastore.py",
+                       classname = "HiveMetastore",
+                       command = "stop",
+                       config_file="default.json"
+    )
+
+    self.assertResourceCalled('Execute', 'kill `cat /var/run/hive/hive.pid` >/dev/null 2>&1 && rm -f /var/run/hive/hive.pid')
+    self.assertNoMoreResources()
+
+  def test_configure_secured(self):
+    self.executeScript("2.1.1/services/HIVE/package/scripts/hive_metastore.py",
+                       classname = "HiveMetastore",
+                       command = "configure",
+                       config_file="secured.json"
+    )
+    self.assert_configure_default()
+    self.assertNoMoreResources()
+
+  def test_start_secured(self):
+    self.executeScript("2.1.1/services/HIVE/package/scripts/hive_metastore.py",
+                       classname = "HiveMetastore",
+                       command = "start",
+                       config_file="secured.json"
+    )
+
+    self.assert_configure_secured()
+    self.assertResourceCalled('Execute', 'env HADOOP_HOME=/usr JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /tmp/start_metastore_script /var/log/hive/hive.out /var/log/hive/hive.log /var/run/hive/hive.pid /etc/hive/conf.server',
+                              not_if = 'ls /var/run/hive/hive.pid >/dev/null 2>&1 && ps `cat /var/run/hive/hive.pid` >/dev/null 2>&1',
+                              user = 'hive'
+    )
+
+    self.assertResourceCalled('Execute', '/usr/jdk64/jdk1.7.0_45/bin/java -cp /usr/lib/ambari-agent/DBConnectionVerification.jar:/usr/share/java/mysql-connector-java.jar org.apache.ambari.server.DBConnectionVerification jdbc:mysql://c6402.ambari.apache.org/hive?createDatabaseIfNotExist=true hive asd com.mysql.jdbc.Driver',
+                              path=['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin']
+    )
+
+    self.assertNoMoreResources()
+
+  def test_stop_secured(self):
+    self.executeScript("2.1.1/services/HIVE/package/scripts/hive_metastore.py",
+                       classname = "HiveMetastore",
+                       command = "stop",
+                       config_file="secured.json"
+    )
+
+    self.assertResourceCalled('Execute', 'kill `cat /var/run/hive/hive.pid` >/dev/null 2>&1 && rm -f /var/run/hive/hive.pid')
+    self.assertNoMoreResources()
+
+  def assert_configure_default(self):
+    self.assertResourceCalled('Execute', 'hive mkdir -p /tmp/HDP-artifacts/ ; cp /usr/share/java/mysql-connector-java.jar /usr/lib/hive/lib//mysql-connector-java.jar',
+      creates = '/usr/lib/hive/lib//mysql-connector-java.jar',
+      path = ['/bin', 'usr/bin/'],
+      not_if = 'test -f /usr/lib/hive/lib//mysql-connector-java.jar',
+    )
+    self.assertResourceCalled('Directory', '/etc/hive/conf.server',
+      owner = 'hive',
+      group = 'hadoop',
+      recursive = True,
+    )
+    self.assertResourceCalled('XmlConfig', 'hive-site.xml',
+      owner = 'hive',
+      group = 'hadoop',
+      mode = 384,
+      conf_dir = '/etc/hive/conf.server',
+      configurations = self.getConfig()['configurations']['hive-site'],
+    )
+    self.assertResourceCalled('Execute', "/bin/sh -c 'cd /usr/lib/ambari-agent/ && curl -kf --retry 5 http://c6401.ambari.apache.org:8080/resources/DBConnectionVerification.jar -o DBConnectionVerification.jar'",
+      not_if = '[ -f DBConnectionVerification.jar]',
+    )
+    self.assertResourceCalled('File', '/tmp/start_metastore_script',
+      content = StaticFile('startMetastore.sh'),
+      mode = 493,
+    )
+    self.assertResourceCalled('Directory', '/var/run/hive',
+      owner = 'hive',
+      group = 'hadoop',
+      mode = 493,
+      recursive = True,
+    )
+    self.assertResourceCalled('Directory', '/var/log/hive',
+      owner = 'hive',
+      group = 'hadoop',
+      mode = 493,
+      recursive = True,
+    )
+    self.assertResourceCalled('Directory', '/var/lib/hive',
+      owner = 'hive',
+      group = 'hadoop',
+      mode = 493,
+      recursive = True,
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf.server/hive-env.sh',
+      content = Template('hive-env.sh.j2', conf_dir="/etc/hive/conf.server"),
+      owner = 'hive',
+      group = 'hadoop',
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf/hive-default.xml.template',
+      owner = 'hive',
+      group = 'hadoop',
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf/hive-env.sh.template',
+      owner = 'hive',
+      group = 'hadoop',
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf/hive-exec-log4j.properties.template',
+      owner = 'hive',
+      group = 'hadoop',
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf/hive-log4j.properties.template',
+      owner = 'hive',
+      group = 'hadoop',
+    )
+
+  def assert_configure_secured(self):
+    self.assertResourceCalled('Execute', 'hive mkdir -p /tmp/HDP-artifacts/ ; cp /usr/share/java/mysql-connector-java.jar /usr/lib/hive/lib//mysql-connector-java.jar',
+      creates = '/usr/lib/hive/lib//mysql-connector-java.jar',
+      path = ['/bin', 'usr/bin/'],
+      not_if = 'test -f /usr/lib/hive/lib//mysql-connector-java.jar',
+    )
+    self.assertResourceCalled('Directory', '/etc/hive/conf.server',
+      owner = 'hive',
+      group = 'hadoop',
+      recursive = True,
+    )
+    self.assertResourceCalled('XmlConfig', 'hive-site.xml',
+      owner = 'hive',
+      group = 'hadoop',
+      mode = 384,
+      conf_dir = '/etc/hive/conf.server',
+      configurations = self.getConfig()['configurations']['hive-site'],
+    )
+    self.assertResourceCalled('Execute', "/bin/sh -c 'cd /usr/lib/ambari-agent/ && curl -kf --retry 5 http://c6401.ambari.apache.org:8080/resources/DBConnectionVerification.jar -o DBConnectionVerification.jar'",
+      not_if = '[ -f DBConnectionVerification.jar]',
+    )
+    self.assertResourceCalled('File', '/tmp/start_metastore_script',
+      content = StaticFile('startMetastore.sh'),
+      mode = 493,
+    )
+    self.assertResourceCalled('Directory', '/var/run/hive',
+      owner = 'hive',
+      group = 'hadoop',
+      mode = 493,
+      recursive = True,
+    )
+    self.assertResourceCalled('Directory', '/var/log/hive',
+      owner = 'hive',
+      group = 'hadoop',
+      mode = 493,
+      recursive = True,
+    )
+    self.assertResourceCalled('Directory', '/var/lib/hive',
+      owner = 'hive',
+      group = 'hadoop',
+      mode = 493,
+      recursive = True,
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf.server/hive-env.sh',
+      content = Template('hive-env.sh.j2', conf_dir="/etc/hive/conf.server"),
+      owner = 'hive',
+      group = 'hadoop',
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf/hive-default.xml.template',
+      owner = 'hive',
+      group = 'hadoop',
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf/hive-env.sh.template',
+      owner = 'hive',
+      group = 'hadoop',
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf/hive-exec-log4j.properties.template',
+      owner = 'hive',
+      group = 'hadoop',
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf/hive-log4j.properties.template',
+      owner = 'hive',
+      group = 'hadoop',
+    )
+

http://git-wip-us.apache.org/repos/asf/ambari/blob/151df0ba/ambari-server/src/test/python/stacks/2.1.1/HIVE/test_hive_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.1.1/HIVE/test_hive_server.py b/ambari-server/src/test/python/stacks/2.1.1/HIVE/test_hive_server.py
new file mode 100644
index 0000000..e669411
--- /dev/null
+++ b/ambari-server/src/test/python/stacks/2.1.1/HIVE/test_hive_server.py
@@ -0,0 +1,231 @@
+#!/usr/bin/env python
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+from mock.mock import MagicMock, call, patch
+from stacks.utils.RMFTestCase import *
+
+class TestHiveServer(RMFTestCase):
+
+  def test_configure_default(self):
+    self.executeScript("2.1.1/services/HIVE/package/scripts/hive_server.py",
+                       classname = "HiveServer",
+                       command = "configure",
+                       config_file="default.json"
+    )
+    self.assert_configure_default()
+    self.assertNoMoreResources()
+  
+  def test_start_default(self):
+    self.executeScript("2.1.1/services/HIVE/package/scripts/hive_server.py",
+                         classname = "HiveServer",
+                         command = "start",
+                         config_file="default.json"
+    )
+
+    self.assert_configure_default()
+    self.assertResourceCalled('Execute', 'env JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /tmp/start_hiveserver2_script /var/log/hive/hive-server2.out /var/log/hive/hive-server2.log /var/run/hive/hive-server.pid /etc/hive/conf.server',
+                              not_if = 'ls /var/run/hive/hive-server.pid >/dev/null 2>&1 && ps `cat /var/run/hive/hive-server.pid` >/dev/null 2>&1',
+                              user = 'hive'
+    )
+
+    self.assertResourceCalled('Execute', '/usr/jdk64/jdk1.7.0_45/bin/java -cp /usr/lib/ambari-agent/DBConnectionVerification.jar:/usr/share/java/mysql-connector-java.jar org.apache.ambari.server.DBConnectionVerification jdbc:mysql://c6402.ambari.apache.org/hive?createDatabaseIfNotExist=true hive asd com.mysql.jdbc.Driver',
+                              path=['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin']
+    )
+
+    self.assertNoMoreResources()
+
+  def test_stop_default(self):
+    self.executeScript("2.1.1/services/HIVE/package/scripts/hive_server.py",
+                       classname = "HiveServer",
+                       command = "stop",
+                       config_file="default.json"
+    )
+
+    self.assertResourceCalled('Execute', 'kill `cat /var/run/hive/hive-server.pid` >/dev/null 2>&1 && rm -f /var/run/hive/hive-server.pid')
+    self.assertNoMoreResources()
+
+    
+  def test_configure_secured(self):
+    self.executeScript("2.1.1/services/HIVE/package/scripts/hive_server.py",
+                       classname = "HiveServer",
+                       command = "configure",
+                       config_file="secured.json"
+    )
+    self.assert_configure_secured()
+    self.assertNoMoreResources()
+
+  def test_start_secured(self):
+    self.executeScript("2.1.1/services/HIVE/package/scripts/hive_server.py",
+                       classname = "HiveServer",
+                       command = "start",
+                       config_file="secured.json"
+    )
+
+    self.assert_configure_secured()
+    self.assertResourceCalled('Execute', 'env JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /tmp/start_hiveserver2_script /var/log/hive/hive-server2.out /var/log/hive/hive-server2.log /var/run/hive/hive-server.pid /etc/hive/conf.server',
+                              not_if = 'ls /var/run/hive/hive-server.pid >/dev/null 2>&1 && ps `cat /var/run/hive/hive-server.pid` >/dev/null 2>&1',
+                              user = 'hive'
+    )
+
+    self.assertResourceCalled('Execute', '/usr/jdk64/jdk1.7.0_45/bin/java -cp /usr/lib/ambari-agent/DBConnectionVerification.jar:/usr/share/java/mysql-connector-java.jar org.apache.ambari.server.DBConnectionVerification jdbc:mysql://c6402.ambari.apache.org/hive?createDatabaseIfNotExist=true hive asd com.mysql.jdbc.Driver',
+                              path=['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin']
+    )
+
+    self.assertNoMoreResources()
+
+  def test_stop_secured(self):
+    self.executeScript("2.1.1/services/HIVE/package/scripts/hive_server.py",
+                       classname = "HiveServer",
+                       command = "stop",
+                       config_file="secured.json"
+    )
+
+    self.assertResourceCalled('Execute', 'kill `cat /var/run/hive/hive-server.pid` >/dev/null 2>&1 && rm -f /var/run/hive/hive-server.pid')
+    self.assertNoMoreResources()
+
+  def assert_configure_default(self):
+    self.assertResourceCalled('Execute', 'hive mkdir -p /tmp/HDP-artifacts/ ; cp /usr/share/java/mysql-connector-java.jar /usr/lib/hive/lib//mysql-connector-java.jar',
+      creates = '/usr/lib/hive/lib//mysql-connector-java.jar',
+      path = ['/bin', 'usr/bin/'],
+      not_if = 'test -f /usr/lib/hive/lib//mysql-connector-java.jar',
+    )
+    self.assertResourceCalled('Directory', '/etc/hive/conf.server',
+      owner = 'hive',
+      group = 'hadoop',
+      recursive = True,
+    )
+    self.assertResourceCalled('XmlConfig', 'hive-site.xml',
+      owner = 'hive',
+      group = 'hadoop',
+      mode = 384,
+      conf_dir = '/etc/hive/conf.server',
+      configurations = self.getConfig()['configurations']['hive-site'],
+    )
+    self.assertResourceCalled('Execute', "/bin/sh -c 'cd /usr/lib/ambari-agent/ && curl -kf --retry 5 http://c6401.ambari.apache.org:8080/resources/DBConnectionVerification.jar -o DBConnectionVerification.jar'",
+      not_if = '[ -f DBConnectionVerification.jar]',
+    )
+    self.assertResourceCalled('File', '/tmp/start_hiveserver2_script',
+      content = StaticFile('startHiveserver2.sh'),
+      mode = 493,
+    )
+    self.assertResourceCalled('Directory', '/var/run/hive',
+      owner = 'hive',
+      group = 'hadoop',
+      mode = 493,
+      recursive = True,
+    )
+    self.assertResourceCalled('Directory', '/var/log/hive',
+      owner = 'hive',
+      group = 'hadoop',
+      mode = 493,
+      recursive = True,
+    )
+    self.assertResourceCalled('Directory', '/var/lib/hive',
+      owner = 'hive',
+      group = 'hadoop',
+      mode = 493,
+      recursive = True,
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf.server/hive-env.sh',
+      content = Template('hive-env.sh.j2', conf_dir="/etc/hive/conf.server"),
+      owner = 'hive',
+      group = 'hadoop',
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf/hive-default.xml.template',
+      owner = 'hive',
+      group = 'hadoop',
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf/hive-env.sh.template',
+      owner = 'hive',
+      group = 'hadoop',
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf/hive-exec-log4j.properties.template',
+      owner = 'hive',
+      group = 'hadoop',
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf/hive-log4j.properties.template',
+      owner = 'hive',
+      group = 'hadoop',
+    )
+
+  def assert_configure_secured(self):
+    self.assertResourceCalled('Execute', 'hive mkdir -p /tmp/HDP-artifacts/ ; cp /usr/share/java/mysql-connector-java.jar /usr/lib/hive/lib//mysql-connector-java.jar',
+      creates = '/usr/lib/hive/lib//mysql-connector-java.jar',
+      path = ['/bin', 'usr/bin/'],
+      not_if = 'test -f /usr/lib/hive/lib//mysql-connector-java.jar',
+    )
+    self.assertResourceCalled('Directory', '/etc/hive/conf.server',
+      owner = 'hive',
+      group = 'hadoop',
+      recursive = True,
+    )
+    self.assertResourceCalled('XmlConfig', 'hive-site.xml',
+      owner = 'hive',
+      group = 'hadoop',
+      mode = 384,
+      conf_dir = '/etc/hive/conf.server',
+      configurations = self.getConfig()['configurations']['hive-site'],
+    )
+    self.assertResourceCalled('Execute', "/bin/sh -c 'cd /usr/lib/ambari-agent/ && curl -kf --retry 5 http://c6401.ambari.apache.org:8080/resources/DBConnectionVerification.jar -o DBConnectionVerification.jar'",
+      not_if = '[ -f DBConnectionVerification.jar]',
+    )
+    self.assertResourceCalled('File', '/tmp/start_hiveserver2_script',
+      content = StaticFile('startHiveserver2.sh'),
+      mode = 493,
+    )
+    self.assertResourceCalled('Directory', '/var/run/hive',
+      owner = 'hive',
+      group = 'hadoop',
+      mode = 493,
+      recursive = True,
+    )
+    self.assertResourceCalled('Directory', '/var/log/hive',
+      owner = 'hive',
+      group = 'hadoop',
+      mode = 493,
+      recursive = True,
+    )
+    self.assertResourceCalled('Directory', '/var/lib/hive',
+      owner = 'hive',
+      group = 'hadoop',
+      mode = 493,
+      recursive = True,
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf.server/hive-env.sh',
+      content = Template('hive-env.sh.j2', conf_dir="/etc/hive/conf.server"),
+      owner = 'hive',
+      group = 'hadoop',
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf/hive-default.xml.template',
+      owner = 'hive',
+      group = 'hadoop',
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf/hive-env.sh.template',
+      owner = 'hive',
+      group = 'hadoop',
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf/hive-exec-log4j.properties.template',
+      owner = 'hive',
+      group = 'hadoop',
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf/hive-log4j.properties.template',
+      owner = 'hive',
+      group = 'hadoop',
+    )
+
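
The start assertions above guard the HiveServer2 launch with a not_if check that only succeeds when the pid file exists and the process it names is still alive, so a repeated start becomes a no-op. A small sketch of the same check in plain Python; the pid-file path is taken from the assertions, and this is an illustration rather than Ambari code:

    import subprocess

    def hiveserver2_running(pid_file="/var/run/hive/hive-server.pid"):
        # Same shape as the not_if guard asserted above: succeed only if
        # the pid file exists and the recorded process is still alive.
        check = ("ls {p} >/dev/null 2>&1 && "
                 "ps `cat {p}` >/dev/null 2>&1").format(p=pid_file)
        return subprocess.call(["bash", "-c", check]) == 0

    if not hiveserver2_running():
        print("would start HiveServer2 here")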

http://git-wip-us.apache.org/repos/asf/ambari/blob/151df0ba/ambari-server/src/test/python/stacks/2.1.1/HIVE/test_mysql_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.1.1/HIVE/test_mysql_server.py b/ambari-server/src/test/python/stacks/2.1.1/HIVE/test_mysql_server.py
new file mode 100644
index 0000000..2b4a531
--- /dev/null
+++ b/ambari-server/src/test/python/stacks/2.1.1/HIVE/test_mysql_server.py
@@ -0,0 +1,142 @@
+#!/usr/bin/env python
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+from mock.mock import MagicMock, call, patch
+from stacks.utils.RMFTestCase import *
+
+class TestMySqlServer(RMFTestCase):
+
+  def test_configure_default(self):
+    self.executeScript("2.1.1/services/HIVE/package/scripts/mysql_server.py",
+                       classname = "MysqlServer",
+                       command = "configure",
+                       config_file="default.json"
+    )
+    self.assert_configure_default()
+    self.assertNoMoreResources()
+
+  def test_start_default(self):
+    self.executeScript("2.1.1/services/HIVE/package/scripts/mysql_server.py",
+                       classname = "MysqlServer",
+                       command = "start",
+                       config_file="default.json"
+    )
+
+    self.assertResourceCalled('Execute', 'service mysqld start',
+                       logoutput = True,
+                       path = ['/usr/local/bin/:/bin/:/sbin/'],
+                       tries = 1,
+    )
+    self.assertNoMoreResources()
+
+  def test_stop_default(self):
+    self.executeScript("2.1.1/services/HIVE/package/scripts/mysql_server.py",
+                       classname = "MysqlServer",
+                       command = "stop",
+                       config_file="default.json"
+    )
+
+    self.assertResourceCalled('Execute', 'service mysqld stop',
+                              logoutput = True,
+                              path = ['/usr/local/bin/:/bin/:/sbin/'],
+                              tries = 1,
+    )
+    self.assertNoMoreResources()
+
+
+  def test_configure_secured(self):
+    self.executeScript("2.1.1/services/HIVE/package/scripts/mysql_server.py",
+                       classname = "MysqlServer",
+                       command = "configure",
+                       config_file="secured.json"
+    )
+    self.assert_configure_secured()
+    self.assertNoMoreResources()
+
+  def test_start_secured(self):
+    self.executeScript("2.1.1/services/HIVE/package/scripts/mysql_server.py",
+                       classname = "MysqlServer",
+                       command = "start",
+                       config_file="secured.json"
+    )
+
+    self.assertResourceCalled('Execute', 'service mysqld start',
+                              logoutput = True,
+                              path = ['/usr/local/bin/:/bin/:/sbin/'],
+                              tries = 1,
+                              )
+    self.assertNoMoreResources()
+
+  def test_stop_secured(self):
+    self.executeScript("2.1.1/services/HIVE/package/scripts/mysql_server.py",
+                       classname = "MysqlServer",
+                       command = "stop",
+                       config_file="secured.json"
+    )
+
+    self.assertResourceCalled('Execute', 'service mysqld stop',
+                              logoutput = True,
+                              path = ['/usr/local/bin/:/bin/:/sbin/'],
+                              tries = 1,
+                              )
+    self.assertNoMoreResources()
+
+  def assert_configure_default(self):
+    self.assertResourceCalled('Execute', 'service mysqld start',
+      logoutput = True,
+      path = ['/usr/local/bin/:/bin/:/sbin/'],
+      tries = 1,
+    )
+    self.assertResourceCalled('File', '/tmp/addMysqlUser.sh',
+      content = StaticFile('addMysqlUser.sh'),
+      mode = 493,
+    )
+    self.assertResourceCalled('Execute', ('bash', '-x', '/tmp/addMysqlUser.sh', 'mysqld', u'hive', 'asd', u'c6402.ambari.apache.org'),
+      logoutput = True,
+      path = ['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'],
+      tries = 3,
+      try_sleep = 5,
+    )
+    self.assertResourceCalled('Execute', 'service mysqld stop',
+      logoutput = True,
+      path = ['/usr/local/bin/:/bin/:/sbin/'],
+      tries = 1,
+    )
+
+  def assert_configure_secured(self):
+    self.assertResourceCalled('Execute', 'service mysqld start',
+      logoutput = True,
+      path = ['/usr/local/bin/:/bin/:/sbin/'],
+      tries = 1,
+    )
+    self.assertResourceCalled('File', '/tmp/addMysqlUser.sh',
+      content = StaticFile('addMysqlUser.sh'),
+      mode = 493,
+    )
+    self.assertResourceCalled('Execute', ('bash', '-x', '/tmp/addMysqlUser.sh', 'mysqld', u'hive', 'asd', u'c6402.ambari.apache.org'),
+      logoutput = True,
+      path = ['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'],
+      tries = 3,
+      try_sleep = 5,
+    )
+    self.assertResourceCalled('Execute', 'service mysqld stop',
+      logoutput = True,
+      path = ['/usr/local/bin/:/bin/:/sbin/'],
+      tries = 1,
+    )
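
The addMysqlUser call above is asserted as an argument tuple rather than a single shell string. In plain subprocess terms, an argument list is exec'd directly and is not re-parsed by a shell, which keeps the hive user, password and hostname arguments intact; a single string goes through a shell. A minimal illustration with the standard library (Execute in resource_management presumably follows the same convention, but that is an assumption here):

    import subprocess

    # Argument list: exec'd directly, nothing is shell-expanded.
    subprocess.call(["echo", "literal $HOME and spaces stay intact"])

    # Single string with shell=True: the shell expands and re-splits it.
    subprocess.call("echo expanded: $HOME", shell=True)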

http://git-wip-us.apache.org/repos/asf/ambari/blob/151df0ba/ambari-server/src/test/python/stacks/utils/RMFTestCase.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/utils/RMFTestCase.py b/ambari-server/src/test/python/stacks/utils/RMFTestCase.py
index 9f9b0d6..ee41b17 100644
--- a/ambari-server/src/test/python/stacks/utils/RMFTestCase.py
+++ b/ambari-server/src/test/python/stacks/utils/RMFTestCase.py
@@ -121,15 +121,15 @@ class RMFTestCase(TestCase):
 def Template(name, **kwargs):
   with RMFTestCase.env:
     from resource_management.core.source import Template
-    return Template(name, kwargs)
+    return Template(name, **kwargs)
   
 def StaticFile(name, **kwargs):
   with RMFTestCase.env:
     from resource_management.core.source import StaticFile
-    return StaticFile(name, kwargs)
+    return StaticFile(name, **kwargs)
   
 def InlineTemplate(name, **kwargs):
   with RMFTestCase.env:
     from resource_management.core.source import InlineTemplate
-    return InlineTemplate(name, kwargs)
+    return InlineTemplate(name, **kwargs)
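
The small RMFTestCase change above (forwarding **kwargs instead of passing the dict positionally) is what makes assertions such as Template('hive-env.sh.j2', conf_dir="/etc/hive/conf.server") work. A stand-alone sketch with a stand-in function, not the real Template class, showing the difference in calling convention:

    def template(name, **kwargs):
        # Stand-in for the wrapped Template; only the call style matters.
        return name, kwargs

    # Old style, template(name, kwargs): the options dict arrives as an
    # unexpected extra positional argument and raises a TypeError.

    # Fixed style, template(name, **kwargs): options arrive as keywords.
    name, options = template("hive-env.sh.j2",
                             conf_dir="/etc/hive/conf.server")
    print("%s %r" % (name, options))
    # hive-env.sh.j2 {'conf_dir': '/etc/hive/conf.server'}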
 


[2/2] git commit: AMBARI-4387. Write unittests for YARN install script (Arsen Babych via onishuk)

Posted by ao...@apache.org.
AMBARI-4387. Write unittests for YARN install script (Arsen Babych via onishuk)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/2e34a088
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/2e34a088
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/2e34a088

Branch: refs/heads/trunk
Commit: 2e34a088b62ac2762eb35f21b064bd0a0cf0ea0d
Parents: 151df0b
Author: Andrew Onischuk <ao...@hortonworks.com>
Authored: Thu Jan 23 04:57:30 2014 -0800
Committer: Andrew Onischuk <ao...@hortonworks.com>
Committed: Thu Jan 23 04:57:30 2014 -0800

----------------------------------------------------------------------
 .../MAPREDUCE/package/scripts/historyserver.py  |   1 +
 .../MAPREDUCE/package/scripts/jobtracker.py     |  21 --
 .../2.1.1/services/YARN/package/scripts/yarn.py |   7 -
 .../stacks/2.1.1/YARN/test_historyserver.py     | 289 ++++++++++++++++++
 .../stacks/2.1.1/YARN/test_mapreduce2_client.py | 215 ++++++++++++++
 .../stacks/2.1.1/YARN/test_nodemanager.py       | 290 +++++++++++++++++++
 .../stacks/2.1.1/YARN/test_resourcemanager.py   | 288 ++++++++++++++++++
 .../stacks/2.1.1/YARN/test_yarn_client.py       | 215 ++++++++++++++
 8 files changed, 1298 insertions(+), 28 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/2e34a088/ambari-server/src/main/resources/stacks/HDP/1.3.3/services/MAPREDUCE/package/scripts/historyserver.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.3/services/MAPREDUCE/package/scripts/historyserver.py b/ambari-server/src/main/resources/stacks/HDP/1.3.3/services/MAPREDUCE/package/scripts/historyserver.py
index 8eb2089..972a767 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.3/services/MAPREDUCE/package/scripts/historyserver.py
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.3/services/MAPREDUCE/package/scripts/historyserver.py
@@ -38,6 +38,7 @@ class Historyserver(Script):
   def start(self, env):
     import params
     env.set_params(params)
+    self.configure(env)
     service('historyserver',
             action='start'
     )

http://git-wip-us.apache.org/repos/asf/ambari/blob/2e34a088/ambari-server/src/main/resources/stacks/HDP/1.3.3/services/MAPREDUCE/package/scripts/jobtracker.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.3/services/MAPREDUCE/package/scripts/jobtracker.py b/ambari-server/src/main/resources/stacks/HDP/1.3.3/services/MAPREDUCE/package/scripts/jobtracker.py
index 8f7f1d7..5cd41ae 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.3/services/MAPREDUCE/package/scripts/jobtracker.py
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.3/services/MAPREDUCE/package/scripts/jobtracker.py
@@ -20,27 +20,6 @@ Ambari Agent
 
 """
 
-#!/usr/bin/env python2.6
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-Ambari Agent
-
-"""
 import sys
 from resource_management import *
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/2e34a088/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/YARN/package/scripts/yarn.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/YARN/package/scripts/yarn.py b/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/YARN/package/scripts/yarn.py
index 1d97373..986356e 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/YARN/package/scripts/yarn.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/YARN/package/scripts/yarn.py
@@ -103,13 +103,6 @@ def yarn():
        content=Template('yarn-env.sh.j2')
   )
 
-  File(format("{config_dir}/hadoop-env.sh"),
-       owner=params.hdfs_user,
-       group=params.user_group,
-       mode=0755,
-       content=StaticFile(format('{hadoop_conf_dir}/hadoop-env.sh'))
-  )
-
   if params.security_enabled:
     container_executor = format("{yarn_container_bin}/container-executor")
     File(container_executor,

http://git-wip-us.apache.org/repos/asf/ambari/blob/2e34a088/ambari-server/src/test/python/stacks/2.1.1/YARN/test_historyserver.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.1.1/YARN/test_historyserver.py b/ambari-server/src/test/python/stacks/2.1.1/YARN/test_historyserver.py
new file mode 100644
index 0000000..ea0ee0c
--- /dev/null
+++ b/ambari-server/src/test/python/stacks/2.1.1/YARN/test_historyserver.py
@@ -0,0 +1,289 @@
+#!/usr/bin/env python
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+from mock.mock import MagicMock, call, patch
+from stacks.utils.RMFTestCase import *
+
+class TestHistoryServer(RMFTestCase):
+
+  def test_configure_default(self):
+    self.executeScript("2.1.1/services/YARN/package/scripts/historyserver.py",
+                       classname = "Histroryserver",
+                       command = "configure",
+                       config_file="default.json"
+    )
+    self.assert_configure_default()
+    self.assertNoMoreResources()
+
+  def test_start_default(self):
+    self.executeScript("2.1.1/services/YARN/package/scripts/historyserver.py",
+                       classname = "Histroryserver",
+                       command = "start",
+                       config_file="default.json"
+    )
+
+    self.assert_configure_default()
+    self.assertResourceCalled('Execute', 'export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop-mapreduce/sbin/mr-jobhistory-daemon.sh --config /etc/hadoop/conf start historyserver',
+                              not_if = 'ls /var/run/hadoop-mapreduce/mapred/mapred-mapred-historyserver.pid >/dev/null 2>&1 && ps `cat /var/run/hadoop-mapreduce/mapred/mapred-mapred-historyserver.pid` >/dev/null 2>&1',
+                              user = 'mapred'
+    )
+    self.assertResourceCalled('Execute', 'ls /var/run/hadoop-mapreduce/mapred/mapred-mapred-historyserver.pid >/dev/null 2>&1 && ps `cat /var/run/hadoop-mapreduce/mapred/mapred-mapred-historyserver.pid` >/dev/null 2>&1',
+                              user = 'mapred',
+                              not_if = 'ls /var/run/hadoop-mapreduce/mapred/mapred-mapred-historyserver.pid >/dev/null 2>&1 && ps `cat /var/run/hadoop-mapreduce/mapred/mapred-mapred-historyserver.pid` >/dev/null 2>&1',
+                              initial_wait=5
+    )
+    self.assertNoMoreResources()
+
+  def test_stop_default(self):
+    self.executeScript("2.1.1/services/YARN/package/scripts/historyserver.py",
+                       classname = "Histroryserver",
+                       command = "stop",
+                       config_file="default.json"
+    )
+
+    self.assertResourceCalled('Execute', 'export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop-mapreduce/sbin/mr-jobhistory-daemon.sh --config /etc/hadoop/conf stop historyserver',
+                              user = 'mapred'
+    )
+    self.assertResourceCalled('Execute', 'rm -f /var/run/hadoop-mapreduce/mapred/mapred-mapred-historyserver.pid',
+                              user = 'mapred'
+    )
+    self.assertNoMoreResources()
+
+  def test_configure_secured(self):
+
+    self.executeScript("2.1.1/services/YARN/package/scripts/historyserver.py",
+                       classname = "Histroryserver",
+                       command = "configure",
+                       config_file="secured.json"
+    )
+    self.assert_configure_secured()
+    self.assertNoMoreResources()
+
+  def test_start_secured(self):
+    self.executeScript("2.1.1/services/YARN/package/scripts/historyserver.py",
+                       classname = "Histroryserver",
+                       command = "start",
+                       config_file="secured.json"
+    )
+
+    self.assert_configure_secured()
+    self.assertResourceCalled('Execute', 'export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop-mapreduce/sbin/mr-jobhistory-daemon.sh --config /etc/hadoop/conf start historyserver',
+                              not_if = 'ls /var/run/hadoop-mapreduce/mapred/mapred-mapred-historyserver.pid >/dev/null 2>&1 && ps `cat /var/run/hadoop-mapreduce/mapred/mapred-mapred-historyserver.pid` >/dev/null 2>&1',
+                              user = 'mapred'
+    )
+    self.assertResourceCalled('Execute', 'ls /var/run/hadoop-mapreduce/mapred/mapred-mapred-historyserver.pid >/dev/null 2>&1 && ps `cat /var/run/hadoop-mapreduce/mapred/mapred-mapred-historyserver.pid` >/dev/null 2>&1',
+                              user = 'mapred',
+                              not_if = 'ls /var/run/hadoop-mapreduce/mapred/mapred-mapred-historyserver.pid >/dev/null 2>&1 && ps `cat /var/run/hadoop-mapreduce/mapred/mapred-mapred-historyserver.pid` >/dev/null 2>&1',
+                              initial_wait=5
+    )
+    self.assertNoMoreResources()
+
+  def test_stop_secured(self):
+    self.executeScript("2.1.1/services/YARN/package/scripts/historyserver.py",
+                       classname = "Histroryserver",
+                       command = "stop",
+                       config_file="secured.json"
+    )
+
+    self.assertResourceCalled('Execute', 'export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop-mapreduce/sbin/mr-jobhistory-daemon.sh --config /etc/hadoop/conf stop historyserver',
+                              user = 'mapred'
+    )
+    self.assertResourceCalled('Execute', 'rm -f /var/run/hadoop-mapreduce/mapred/mapred-mapred-historyserver.pid',
+                              user = 'mapred'
+    )
+    self.assertNoMoreResources()
+
+  def assert_configure_default(self):
+
+    self.assertResourceCalled('Directory', '/var/run/hadoop-yarn/yarn',
+      owner = 'yarn',
+      group = 'hadoop',
+      recursive = True,
+    )
+    self.assertResourceCalled('Directory', '/var/log/hadoop-yarn/yarn',
+      owner = 'yarn',
+      group = 'hadoop',
+      recursive = True,
+    )
+    self.assertResourceCalled('Directory', '/var/run/hadoop-mapreduce/mapred',
+      owner = 'mapred',
+      group = 'hadoop',
+      recursive = True,
+    )
+    self.assertResourceCalled('Directory', '/var/log/hadoop-mapreduce/mapred',
+      owner = 'mapred',
+      group = 'hadoop',
+      recursive = True,
+    )
+    self.assertResourceCalled('Directory', '/hadoop/yarn/local',
+      owner = 'yarn',
+      recursive = True,
+    )
+    self.assertResourceCalled('Directory', '/hadoop/yarn/log',
+      owner = 'yarn',
+      recursive = True,
+    )
+    self.assertResourceCalled('Directory', '/var/log/hadoop-yarn',
+      owner = 'yarn',
+      recursive = True,
+    )
+    self.assertResourceCalled('XmlConfig', 'core-site.xml',
+      owner = 'hdfs',
+      group = 'hadoop',
+      mode = 420,
+      conf_dir = '/etc/hadoop/conf',
+      configurations = self.getConfig()['configurations']['core-site'],
+    )
+    self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
+      owner = 'yarn',
+      group = 'hadoop',
+      mode = 420,
+      conf_dir = '/etc/hadoop/conf',
+      configurations = self.getConfig()['configurations']['mapred-site'],
+    )
+    self.assertResourceCalled('XmlConfig', 'yarn-site.xml',
+      owner = 'yarn',
+      group = 'hadoop',
+      mode = 420,
+      conf_dir = '/etc/hadoop/conf',
+      configurations = self.getConfig()['configurations']['yarn-site'],
+    )
+    self.assertResourceCalled('XmlConfig', 'capacity-scheduler.xml',
+      owner = 'yarn',
+      group = 'hadoop',
+      mode = 420,
+      conf_dir = '/etc/hadoop/conf',
+      configurations = self.getConfig()['configurations']['capacity-scheduler'],
+    )
+    self.assertResourceCalled('File', '/var/log/hadoop-yarn/yarn/hadoop-mapreduce.jobsummary.log',
+      owner = 'yarn',
+      group = 'hadoop',
+    )
+    self.assertResourceCalled('File', '/var/log/hadoop-mapreduce/mapred/hadoop-mapreduce.jobsummary.log',
+      owner = 'mapred',
+      group = 'hadoop',
+    )
+    self.assertResourceCalled('File', '/etc/security/limits.d/yarn.conf',
+      content = Template('yarn.conf.j2'),
+      mode = 420,
+    )
+    self.assertResourceCalled('File', '/etc/security/limits.d/mapreduce.conf',
+      content = Template('mapreduce.conf.j2'),
+      mode = 420,
+    )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/yarn-env.sh',
+      content = Template('yarn-env.sh.j2'),
+      owner = 'yarn',
+      group = 'hadoop',
+      mode = 493,
+    )
+
+  def assert_configure_secured(self):
+
+    self.assertResourceCalled('Directory', '/var/run/hadoop-yarn/yarn',
+      owner = 'yarn',
+      group = 'hadoop',
+      recursive = True,
+    )
+    self.assertResourceCalled('Directory', '/var/log/hadoop-yarn/yarn',
+      owner = 'yarn',
+      group = 'hadoop',
+      recursive = True,
+    )
+    self.assertResourceCalled('Directory', '/var/run/hadoop-mapreduce/mapred',
+      owner = 'mapred',
+      group = 'hadoop',
+      recursive = True,
+    )
+    self.assertResourceCalled('Directory', '/var/log/hadoop-mapreduce/mapred',
+      owner = 'mapred',
+      group = 'hadoop',
+      recursive = True,
+    )
+    self.assertResourceCalled('Directory', '/hadoop/yarn/local',
+      owner = 'yarn',
+      recursive = True,
+    )
+    self.assertResourceCalled('Directory', '/hadoop/yarn/log',
+      owner = 'yarn',
+      recursive = True,
+    )
+    self.assertResourceCalled('Directory', '/var/log/hadoop-yarn',
+      owner = 'yarn',
+      recursive = True,
+    )
+    self.assertResourceCalled('XmlConfig', 'core-site.xml',
+      owner = 'hdfs',
+      group = 'hadoop',
+      mode = 420,
+      conf_dir = '/etc/hadoop/conf',
+      configurations = self.getConfig()['configurations']['core-site'],
+    )
+    self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
+      owner = 'yarn',
+      group = 'hadoop',
+      mode = 420,
+      conf_dir = '/etc/hadoop/conf',
+      configurations = self.getConfig()['configurations']['mapred-site'],
+    )
+    self.assertResourceCalled('XmlConfig', 'yarn-site.xml',
+      owner = 'yarn',
+      group = 'hadoop',
+      mode = 420,
+      conf_dir = '/etc/hadoop/conf',
+      configurations = self.getConfig()['configurations']['yarn-site'],
+    )
+    self.assertResourceCalled('XmlConfig', 'capacity-scheduler.xml',
+      owner = 'yarn',
+      group = 'hadoop',
+      mode = 420,
+      conf_dir = '/etc/hadoop/conf',
+      configurations = self.getConfig()['configurations']['capacity-scheduler'],
+    )
+    self.assertResourceCalled('File', '/var/log/hadoop-yarn/yarn/hadoop-mapreduce.jobsummary.log',
+      owner = 'yarn',
+      group = 'hadoop',
+    )
+    self.assertResourceCalled('File', '/var/log/hadoop-mapreduce/mapred/hadoop-mapreduce.jobsummary.log',
+      owner = 'mapred',
+      group = 'hadoop',
+    )
+    self.assertResourceCalled('File', '/etc/security/limits.d/yarn.conf',
+      content = Template('yarn.conf.j2'),
+      mode = 420,
+    )
+    self.assertResourceCalled('File', '/etc/security/limits.d/mapreduce.conf',
+      content = Template('mapreduce.conf.j2'),
+      mode = 420,
+    )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/yarn-env.sh',
+      content = Template('yarn-env.sh.j2'),
+      owner = 'yarn',
+      group = 'hadoop',
+      mode = 493,
+    )
+    self.assertResourceCalled('File', '/usr/lib/hadoop-yarn/bin/container-executor',
+      group = 'hadoop',
+      mode = 3112,
+    )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/container-executor.cfg',
+      content = Template('container-executor.cfg.j2'),
+      group = 'hadoop',
+      mode = 420,
+    )
\ No newline at end of file
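
One more decimal mode appears in the secured assertions above: 3112 for the container-executor binary. That is 0o6050, i.e. setuid and setgid with group read/execute and no access for others, the conventional permissions for container-executor. A one-liner to confirm the conversion:

    # The container-executor mode asserted in the secured configs above.
    print(oct(3112))  # 0o6050 on Python 3 (06050 on Python 2)
    # 0o6050 = setuid + setgid, group r-x, no permissions for others.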

http://git-wip-us.apache.org/repos/asf/ambari/blob/2e34a088/ambari-server/src/test/python/stacks/2.1.1/YARN/test_mapreduce2_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.1.1/YARN/test_mapreduce2_client.py b/ambari-server/src/test/python/stacks/2.1.1/YARN/test_mapreduce2_client.py
new file mode 100644
index 0000000..0ca6831
--- /dev/null
+++ b/ambari-server/src/test/python/stacks/2.1.1/YARN/test_mapreduce2_client.py
@@ -0,0 +1,215 @@
+#!/usr/bin/env python
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+from mock.mock import MagicMock, call, patch
+from stacks.utils.RMFTestCase import *
+
+class TestMapReduce2Client(RMFTestCase):
+
+  def test_configure_default(self):
+    self.executeScript("2.1.1/services/YARN/package/scripts/mapreduce2_client.py",
+                       classname = "MapReduce2Client",
+                       command = "configure",
+                       config_file="default.json"
+    )
+
+    self.assertResourceCalled('Directory', '/var/run/hadoop-yarn/yarn',
+      owner = 'yarn',
+      group = 'hadoop',
+      recursive = True,
+    )
+    self.assertResourceCalled('Directory', '/var/log/hadoop-yarn/yarn',
+      owner = 'yarn',
+      group = 'hadoop',
+      recursive = True,
+    )
+    self.assertResourceCalled('Directory', '/var/run/hadoop-mapreduce/mapred',
+      owner = 'mapred',
+      group = 'hadoop',
+      recursive = True,
+    )
+    self.assertResourceCalled('Directory', '/var/log/hadoop-mapreduce/mapred',
+      owner = 'mapred',
+      group = 'hadoop',
+      recursive = True,
+    )
+    self.assertResourceCalled('Directory', '/hadoop/yarn/local',
+      owner = 'yarn',
+      recursive = True,
+    )
+    self.assertResourceCalled('Directory', '/hadoop/yarn/log',
+      owner = 'yarn',
+      recursive = True,
+    )
+    self.assertResourceCalled('Directory', '/var/log/hadoop-yarn',
+      owner = 'yarn',
+      recursive = True,
+    )
+    self.assertResourceCalled('XmlConfig', 'core-site.xml',
+      owner = 'hdfs',
+      group = 'hadoop',
+      mode = 420,
+      conf_dir = '/etc/hadoop/conf',
+      configurations = self.getConfig()['configurations']['core-site'],
+    )
+    self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
+      owner = 'yarn',
+      group = 'hadoop',
+      mode = 420,
+      conf_dir = '/etc/hadoop/conf',
+      configurations = self.getConfig()['configurations']['mapred-site'],
+    )
+    self.assertResourceCalled('XmlConfig', 'yarn-site.xml',
+      owner = 'yarn',
+      group = 'hadoop',
+      mode = 420,
+      conf_dir = '/etc/hadoop/conf',
+      configurations = self.getConfig()['configurations']['yarn-site'],
+    )
+    self.assertResourceCalled('XmlConfig', 'capacity-scheduler.xml',
+      owner = 'yarn',
+      group = 'hadoop',
+      mode = 420,
+      conf_dir = '/etc/hadoop/conf',
+      configurations = self.getConfig()['configurations']['capacity-scheduler'],
+    )
+    self.assertResourceCalled('File', '/var/log/hadoop-yarn/yarn/hadoop-mapreduce.jobsummary.log',
+      owner = 'yarn',
+      group = 'hadoop',
+    )
+    self.assertResourceCalled('File', '/var/log/hadoop-mapreduce/mapred/hadoop-mapreduce.jobsummary.log',
+      owner = 'mapred',
+      group = 'hadoop',
+    )
+    self.assertResourceCalled('File', '/etc/security/limits.d/yarn.conf',
+      content = Template('yarn.conf.j2'),
+      mode = 420,
+    )
+    self.assertResourceCalled('File', '/etc/security/limits.d/mapreduce.conf',
+      content = Template('mapreduce.conf.j2'),
+      mode = 420,
+    )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/yarn-env.sh',
+      content = Template('yarn-env.sh.j2'),
+      owner = 'yarn',
+      group = 'hadoop',
+      mode = 493,
+    )
+    self.assertNoMoreResources()
+
+  def test_configure_secured(self):
+
+    self.executeScript("2.1.1/services/YARN/package/scripts/mapreduce2_client.py",
+                       classname = "MapReduce2Client",
+                       command = "configure",
+                       config_file="secured.json"
+    )
+    self.assertResourceCalled('Directory', '/var/run/hadoop-yarn/yarn',
+      owner = 'yarn',
+      group = 'hadoop',
+      recursive = True,
+    )
+    self.assertResourceCalled('Directory', '/var/log/hadoop-yarn/yarn',
+      owner = 'yarn',
+      group = 'hadoop',
+      recursive = True,
+    )
+    self.assertResourceCalled('Directory', '/var/run/hadoop-mapreduce/mapred',
+      owner = 'mapred',
+      group = 'hadoop',
+      recursive = True,
+    )
+    self.assertResourceCalled('Directory', '/var/log/hadoop-mapreduce/mapred',
+      owner = 'mapred',
+      group = 'hadoop',
+      recursive = True,
+    )
+    self.assertResourceCalled('Directory', '/hadoop/yarn/local',
+      owner = 'yarn',
+      recursive = True,
+    )
+    self.assertResourceCalled('Directory', '/hadoop/yarn/log',
+      owner = 'yarn',
+      recursive = True,
+    )
+    self.assertResourceCalled('Directory', '/var/log/hadoop-yarn',
+      owner = 'yarn',
+      recursive = True,
+    )
+    self.assertResourceCalled('XmlConfig', 'core-site.xml',
+      owner = 'hdfs',
+      group = 'hadoop',
+      mode = 420,
+      conf_dir = '/etc/hadoop/conf',
+      configurations = self.getConfig()['configurations']['core-site'],
+    )
+    self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
+      owner = 'yarn',
+      group = 'hadoop',
+      mode = 420,
+      conf_dir = '/etc/hadoop/conf',
+      configurations = self.getConfig()['configurations']['mapred-site'],
+    )
+    self.assertResourceCalled('XmlConfig', 'yarn-site.xml',
+      owner = 'yarn',
+      group = 'hadoop',
+      mode = 420,
+      conf_dir = '/etc/hadoop/conf',
+      configurations = self.getConfig()['configurations']['yarn-site'],
+    )
+    self.assertResourceCalled('XmlConfig', 'capacity-scheduler.xml',
+      owner = 'yarn',
+      group = 'hadoop',
+      mode = 420,
+      conf_dir = '/etc/hadoop/conf',
+      configurations = self.getConfig()['configurations']['capacity-scheduler'],
+    )
+    self.assertResourceCalled('File', '/var/log/hadoop-yarn/yarn/hadoop-mapreduce.jobsummary.log',
+      owner = 'yarn',
+      group = 'hadoop',
+    )
+    self.assertResourceCalled('File', '/var/log/hadoop-mapreduce/mapred/hadoop-mapreduce.jobsummary.log',
+      owner = 'mapred',
+      group = 'hadoop',
+    )
+    self.assertResourceCalled('File', '/etc/security/limits.d/yarn.conf',
+      content = Template('yarn.conf.j2'),
+      mode = 420,
+    )
+    self.assertResourceCalled('File', '/etc/security/limits.d/mapreduce.conf',
+      content = Template('mapreduce.conf.j2'),
+      mode = 420,
+    )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/yarn-env.sh',
+      content = Template('yarn-env.sh.j2'),
+      owner = 'yarn',
+      group = 'hadoop',
+      mode = 493,
+    )
+    self.assertResourceCalled('File', '/usr/lib/hadoop-yarn/bin/container-executor',
+      group = 'hadoop',
+      mode = 3112,
+    )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/container-executor.cfg',
+      content = Template('container-executor.cfg.j2'),
+      group = 'hadoop',
+      mode = 420,
+    )
+    self.assertNoMoreResources()
+
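
For completeness, one possible way to run the new stack tests locally with stock unittest discovery; the directory comes from the diff paths above, and Ambari's own test runner may wire things up differently (for example to set up the mock environment):

    import unittest

    # Discover and run the new YARN tests added in this commit.
    suite = unittest.defaultTestLoader.discover(
        "ambari-server/src/test/python/stacks/2.1.1/YARN",
        pattern="test_*.py")
    unittest.TextTestRunner(verbosity=2).run(suite)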

http://git-wip-us.apache.org/repos/asf/ambari/blob/2e34a088/ambari-server/src/test/python/stacks/2.1.1/YARN/test_nodemanager.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.1.1/YARN/test_nodemanager.py b/ambari-server/src/test/python/stacks/2.1.1/YARN/test_nodemanager.py
new file mode 100644
index 0000000..cd0c5bd
--- /dev/null
+++ b/ambari-server/src/test/python/stacks/2.1.1/YARN/test_nodemanager.py
@@ -0,0 +1,290 @@
+#!/usr/bin/env python
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+from mock.mock import MagicMock, call, patch
+from stacks.utils.RMFTestCase import *
+
+class TestNodeManager(RMFTestCase):
+
+  def test_configure_default(self):
+    self.executeScript("2.1.1/services/YARN/package/scripts/nodemanager.py",
+                       classname = "Nodemanager",
+                       command = "configure",
+                       config_file="default.json"
+    )
+    self.assert_configure_default()
+    self.assertNoMoreResources()
+
+  def test_start_default(self):
+    self.executeScript("2.1.1/services/YARN/package/scripts/nodemanager.py",
+                       classname = "Nodemanager",
+                       command = "start",
+                       config_file="default.json"
+    )
+
+    self.assert_configure_default()
+    self.assertResourceCalled('Execute', 'export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop-yarn/sbin/yarn-daemon.sh --config /etc/hadoop/conf start nodemanager',
+                              not_if = 'ls /var/run/hadoop-yarn/yarn/yarn-yarn-nodemanager.pid >/dev/null 2>&1 && ps `cat /var/run/hadoop-yarn/yarn/yarn-yarn-nodemanager.pid` >/dev/null 2>&1',
+                              user = 'yarn'
+    )
+    self.assertResourceCalled('Execute', 'ls /var/run/hadoop-yarn/yarn/yarn-yarn-nodemanager.pid >/dev/null 2>&1 && ps `cat /var/run/hadoop-yarn/yarn/yarn-yarn-nodemanager.pid` >/dev/null 2>&1',
+                              user = 'yarn',
+                              not_if = 'ls /var/run/hadoop-yarn/yarn/yarn-yarn-nodemanager.pid >/dev/null 2>&1 && ps `cat /var/run/hadoop-yarn/yarn/yarn-yarn-nodemanager.pid` >/dev/null 2>&1',
+                              initial_wait=5
+    )
+    self.assertNoMoreResources()
+
+  def test_stop_default(self):
+    self.executeScript("2.1.1/services/YARN/package/scripts/nodemanager.py",
+                       classname = "Nodemanager",
+                       command = "stop",
+                       config_file="default.json"
+    )
+
+    self.assertResourceCalled('Execute', 'export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop-yarn/sbin/yarn-daemon.sh --config /etc/hadoop/conf stop nodemanager',
+                              user = 'yarn'
+    )
+    self.assertResourceCalled('Execute', 'rm -f /var/run/hadoop-yarn/yarn/yarn-yarn-nodemanager.pid',
+                              user = 'yarn'
+    )
+    self.assertNoMoreResources()
+
+  def test_configure_secured(self):
+
+    self.executeScript("2.1.1/services/YARN/package/scripts/nodemanager.py",
+                       classname = "Nodemanager",
+                       command = "configure",
+                       config_file="secured.json"
+    )
+    self.assert_configure_secured()
+    self.assertNoMoreResources()
+
+  def test_start_secured(self):
+    self.executeScript("2.1.1/services/YARN/package/scripts/nodemanager.py",
+                       classname = "Nodemanager",
+                       command = "start",
+                       config_file="secured.json"
+    )
+
+    self.assert_configure_secured()
+    self.assertResourceCalled('Execute', 'export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop-yarn/sbin/yarn-daemon.sh --config /etc/hadoop/conf start nodemanager',
+                              not_if = 'ls /var/run/hadoop-yarn/yarn/yarn-yarn-nodemanager.pid >/dev/null 2>&1 && ps `cat /var/run/hadoop-yarn/yarn/yarn-yarn-nodemanager.pid` >/dev/null 2>&1',
+                              user = 'yarn'
+    )
+    self.assertResourceCalled('Execute', 'ls /var/run/hadoop-yarn/yarn/yarn-yarn-nodemanager.pid >/dev/null 2>&1 && ps `cat /var/run/hadoop-yarn/yarn/yarn-yarn-nodemanager.pid` >/dev/null 2>&1',
+                              user = 'yarn',
+                              not_if = 'ls /var/run/hadoop-yarn/yarn/yarn-yarn-nodemanager.pid >/dev/null 2>&1 && ps `cat /var/run/hadoop-yarn/yarn/yarn-yarn-nodemanager.pid` >/dev/null 2>&1',
+                              initial_wait=5
+    )
+    self.assertNoMoreResources()
+
+  def test_stop_secured(self):
+    self.executeScript("2.1.1/services/YARN/package/scripts/nodemanager.py",
+                       classname = "Nodemanager",
+                       command = "stop",
+                       config_file="secured.json"
+    )
+
+    self.assertResourceCalled('Execute', 'export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop-yarn/sbin/yarn-daemon.sh --config /etc/hadoop/conf stop nodemanager',
+                              user = 'yarn'
+    )
+    self.assertResourceCalled('Execute', 'rm -f /var/run/hadoop-yarn/yarn/yarn-yarn-nodemanager.pid',
+                              user = 'yarn'
+    )
+    self.assertNoMoreResources()
+
+  def assert_configure_default(self):
+
+    self.assertResourceCalled('Directory', '/var/run/hadoop-yarn/yarn',
+      owner = 'yarn',
+      group = 'hadoop',
+      recursive = True,
+    )
+    self.assertResourceCalled('Directory', '/var/log/hadoop-yarn/yarn',
+      owner = 'yarn',
+      group = 'hadoop',
+      recursive = True,
+    )
+    self.assertResourceCalled('Directory', '/var/run/hadoop-mapreduce/mapred',
+      owner = 'mapred',
+      group = 'hadoop',
+      recursive = True,
+    )
+    self.assertResourceCalled('Directory', '/var/log/hadoop-mapreduce/mapred',
+      owner = 'mapred',
+      group = 'hadoop',
+      recursive = True,
+    )
+    self.assertResourceCalled('Directory', '/hadoop/yarn/local',
+      owner = 'yarn',
+      recursive = True,
+    )
+    self.assertResourceCalled('Directory', '/hadoop/yarn/log',
+      owner = 'yarn',
+      recursive = True,
+    )
+    self.assertResourceCalled('Directory', '/var/log/hadoop-yarn',
+      owner = 'yarn',
+      recursive = True,
+    )
+    self.assertResourceCalled('XmlConfig', 'core-site.xml',
+      owner = 'hdfs',
+      group = 'hadoop',
+      mode = 420,
+      conf_dir = '/etc/hadoop/conf',
+      configurations = self.getConfig()['configurations']['core-site'],
+    )
+    self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
+      owner = 'yarn',
+      group = 'hadoop',
+      mode = 420,
+      conf_dir = '/etc/hadoop/conf',
+      configurations = self.getConfig()['configurations']['mapred-site'],
+    )
+    self.assertResourceCalled('XmlConfig', 'yarn-site.xml',
+      owner = 'yarn',
+      group = 'hadoop',
+      mode = 420,
+      conf_dir = '/etc/hadoop/conf',
+      configurations = self.getConfig()['configurations']['yarn-site'],
+    )
+    self.assertResourceCalled('XmlConfig', 'capacity-scheduler.xml',
+      owner = 'yarn',
+      group = 'hadoop',
+      mode = 420,
+      conf_dir = '/etc/hadoop/conf',
+      configurations = self.getConfig()['configurations']['capacity-scheduler'],
+    )
+    self.assertResourceCalled('File', '/var/log/hadoop-yarn/yarn/hadoop-mapreduce.jobsummary.log',
+      owner = 'yarn',
+      group = 'hadoop',
+    )
+    self.assertResourceCalled('File', '/var/log/hadoop-mapreduce/mapred/hadoop-mapreduce.jobsummary.log',
+      owner = 'mapred',
+      group = 'hadoop',
+    )
+    self.assertResourceCalled('File', '/etc/security/limits.d/yarn.conf',
+      content = Template('yarn.conf.j2'),
+      mode = 420,
+    )
+    self.assertResourceCalled('File', '/etc/security/limits.d/mapreduce.conf',
+      content = Template('mapreduce.conf.j2'),
+      mode = 420,
+    )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/yarn-env.sh',
+      content = Template('yarn-env.sh.j2'),
+      owner = 'yarn',
+      group = 'hadoop',
+      mode = 493,
+    )
+
+  def assert_configure_secured(self):
+
+    self.assertResourceCalled('Directory', '/var/run/hadoop-yarn/yarn',
+      owner = 'yarn',
+      group = 'hadoop',
+      recursive = True,
+    )
+    self.assertResourceCalled('Directory', '/var/log/hadoop-yarn/yarn',
+      owner = 'yarn',
+      group = 'hadoop',
+      recursive = True,
+    )
+    self.assertResourceCalled('Directory', '/var/run/hadoop-mapreduce/mapred',
+      owner = 'mapred',
+      group = 'hadoop',
+      recursive = True,
+    )
+    self.assertResourceCalled('Directory', '/var/log/hadoop-mapreduce/mapred',
+      owner = 'mapred',
+      group = 'hadoop',
+      recursive = True,
+    )
+    self.assertResourceCalled('Directory', '/hadoop/yarn/local',
+      owner = 'yarn',
+      recursive = True,
+    )
+    self.assertResourceCalled('Directory', '/hadoop/yarn/log',
+      owner = 'yarn',
+      recursive = True,
+    )
+    self.assertResourceCalled('Directory', '/var/log/hadoop-yarn',
+      owner = 'yarn',
+      recursive = True,
+    )
+    self.assertResourceCalled('XmlConfig', 'core-site.xml',
+      owner = 'hdfs',
+      group = 'hadoop',
+      mode = 420,
+      conf_dir = '/etc/hadoop/conf',
+      configurations = self.getConfig()['configurations']['core-site'],
+    )
+    self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
+      owner = 'yarn',
+      group = 'hadoop',
+      mode = 420,
+      conf_dir = '/etc/hadoop/conf',
+      configurations = self.getConfig()['configurations']['mapred-site'],
+    )
+    self.assertResourceCalled('XmlConfig', 'yarn-site.xml',
+      owner = 'yarn',
+      group = 'hadoop',
+      mode = 420,
+      conf_dir = '/etc/hadoop/conf',
+      configurations = self.getConfig()['configurations']['yarn-site'],
+    )
+    self.assertResourceCalled('XmlConfig', 'capacity-scheduler.xml',
+      owner = 'yarn',
+      group = 'hadoop',
+      mode = 420,
+      conf_dir = '/etc/hadoop/conf',
+      configurations = self.getConfig()['configurations']['capacity-scheduler'],
+    )
+    self.assertResourceCalled('File', '/var/log/hadoop-yarn/yarn/hadoop-mapreduce.jobsummary.log',
+      owner = 'yarn',
+      group = 'hadoop',
+    )
+    self.assertResourceCalled('File', '/var/log/hadoop-mapreduce/mapred/hadoop-mapreduce.jobsummary.log',
+      owner = 'mapred',
+      group = 'hadoop',
+    )
+    self.assertResourceCalled('File', '/etc/security/limits.d/yarn.conf',
+      content = Template('yarn.conf.j2'),
+      mode = 420,
+    )
+    self.assertResourceCalled('File', '/etc/security/limits.d/mapreduce.conf',
+      content = Template('mapreduce.conf.j2'),
+      mode = 420,
+    )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/yarn-env.sh',
+      content = Template('yarn-env.sh.j2'),
+      owner = 'yarn',
+      group = 'hadoop',
+      mode = 493,
+    )
+    self.assertResourceCalled('File', '/usr/lib/hadoop-yarn/bin/container-executor',
+      group = 'hadoop',
+      mode = 3112,
+    )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/container-executor.cfg',
+      content = Template('container-executor.cfg.j2'),
+      group = 'hadoop',
+      mode = 420,
+    )
+    
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/2e34a088/ambari-server/src/test/python/stacks/2.1.1/YARN/test_resourcemanager.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.1.1/YARN/test_resourcemanager.py b/ambari-server/src/test/python/stacks/2.1.1/YARN/test_resourcemanager.py
new file mode 100644
index 0000000..18ef59d
--- /dev/null
+++ b/ambari-server/src/test/python/stacks/2.1.1/YARN/test_resourcemanager.py
@@ -0,0 +1,288 @@
+#!/usr/bin/env python
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+from mock.mock import MagicMock, call, patch
+from stacks.utils.RMFTestCase import *
+
+class TestResourceManager(RMFTestCase):
+
+  def test_configure_default(self):
+    self.executeScript("2.1.1/services/YARN/package/scripts/resourcemanager.py",
+                       classname = "Resourcemanager",
+                       command = "configure",
+                       config_file="default.json"
+    )
+    self.assert_configure_default()
+    self.assertNoMoreResources()
+
+  def test_start_default(self):
+    self.executeScript("2.1.1/services/YARN/package/scripts/resourcemanager.py",
+                       classname = "Resourcemanager",
+                       command = "start",
+                       config_file="default.json"
+    )
+
+    self.assert_configure_default()
+    self.assertResourceCalled('Execute', 'export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop-yarn/sbin/yarn-daemon.sh --config /etc/hadoop/conf start resourcemanager',
+                              not_if = 'ls /var/run/hadoop-yarn/yarn/yarn-yarn-resourcemanager.pid >/dev/null 2>&1 && ps `cat /var/run/hadoop-yarn/yarn/yarn-yarn-resourcemanager.pid` >/dev/null 2>&1',
+                              user = 'yarn'
+    )
+    self.assertResourceCalled('Execute', 'ls /var/run/hadoop-yarn/yarn/yarn-yarn-resourcemanager.pid >/dev/null 2>&1 && ps `cat /var/run/hadoop-yarn/yarn/yarn-yarn-resourcemanager.pid` >/dev/null 2>&1',
+                              user = 'yarn',
+                              not_if = 'ls /var/run/hadoop-yarn/yarn/yarn-yarn-resourcemanager.pid >/dev/null 2>&1 && ps `cat /var/run/hadoop-yarn/yarn/yarn-yarn-resourcemanager.pid` >/dev/null 2>&1',
+                              initial_wait=5
+    )
+    self.assertNoMoreResources()
+
+  def test_stop_default(self):
+    self.executeScript("2.1.1/services/YARN/package/scripts/resourcemanager.py",
+                       classname = "Resourcemanager",
+                       command = "stop",
+                       config_file="default.json"
+    )
+
+    self.assertResourceCalled('Execute', 'export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop-yarn/sbin/yarn-daemon.sh --config /etc/hadoop/conf stop resourcemanager',
+                              user = 'yarn'
+    )
+    self.assertResourceCalled('Execute', 'rm -f /var/run/hadoop-yarn/yarn/yarn-yarn-resourcemanager.pid',
+                              user = 'yarn'
+    )
+    self.assertNoMoreResources()
+
+  def test_configure_secured(self):
+
+    self.executeScript("2.1.1/services/YARN/package/scripts/resourcemanager.py",
+                       classname = "Resourcemanager",
+                       command = "configure",
+                       config_file="secured.json"
+    )
+    self.assert_configure_secured()
+
+  def test_start_secured(self):
+    self.executeScript("2.1.1/services/YARN/package/scripts/resourcemanager.py",
+                       classname = "Resourcemanager",
+                       command = "start",
+                       config_file="secured.json"
+    )
+
+    self.assert_configure_secured()
+    self.assertResourceCalled('Execute', 'export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop-yarn/sbin/yarn-daemon.sh --config /etc/hadoop/conf start resourcemanager',
+                              not_if = 'ls /var/run/hadoop-yarn/yarn/yarn-yarn-resourcemanager.pid >/dev/null 2>&1 && ps `cat /var/run/hadoop-yarn/yarn/yarn-yarn-resourcemanager.pid` >/dev/null 2>&1',
+                              user = 'yarn'
+    )
+    self.assertResourceCalled('Execute', 'ls /var/run/hadoop-yarn/yarn/yarn-yarn-resourcemanager.pid >/dev/null 2>&1 && ps `cat /var/run/hadoop-yarn/yarn/yarn-yarn-resourcemanager.pid` >/dev/null 2>&1',
+                              user = 'yarn',
+                              not_if = 'ls /var/run/hadoop-yarn/yarn/yarn-yarn-resourcemanager.pid >/dev/null 2>&1 && ps `cat /var/run/hadoop-yarn/yarn/yarn-yarn-resourcemanager.pid` >/dev/null 2>&1',
+                              initial_wait=5
+    )
+    self.assertNoMoreResources()
+
+  def test_stop_secured(self):
+    self.executeScript("2.1.1/services/YARN/package/scripts/resourcemanager.py",
+                       classname = "Resourcemanager",
+                       command = "stop",
+                       config_file="secured.json"
+    )
+
+    self.assertResourceCalled('Execute', 'export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop-yarn/sbin/yarn-daemon.sh --config /etc/hadoop/conf stop resourcemanager',
+                              user = 'yarn'
+    )
+    self.assertResourceCalled('Execute', 'rm -f /var/run/hadoop-yarn/yarn/yarn-yarn-resourcemanager.pid',
+                              user = 'yarn'
+    )
+    self.assertNoMoreResources()
+
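+  # Expected configure-time resources for the default (non-secure) config,
+  # shared by test_configure_default and test_start_default and asserted in
+  # the order the script declares them.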
+  def assert_configure_default(self):
+
+    self.assertResourceCalled('Directory', '/var/run/hadoop-yarn/yarn',
+      owner = 'yarn',
+      group = 'hadoop',
+      recursive = True,
+    )
+    self.assertResourceCalled('Directory', '/var/log/hadoop-yarn/yarn',
+      owner = 'yarn',
+      group = 'hadoop',
+      recursive = True,
+    )
+    self.assertResourceCalled('Directory', '/var/run/hadoop-mapreduce/mapred',
+      owner = 'mapred',
+      group = 'hadoop',
+      recursive = True,
+    )
+    self.assertResourceCalled('Directory', '/var/log/hadoop-mapreduce/mapred',
+      owner = 'mapred',
+      group = 'hadoop',
+      recursive = True,
+    )
+    self.assertResourceCalled('Directory', '/hadoop/yarn/local',
+      owner = 'yarn',
+      recursive = True,
+    )
+    self.assertResourceCalled('Directory', '/hadoop/yarn/log',
+      owner = 'yarn',
+      recursive = True,
+    )
+    self.assertResourceCalled('Directory', '/var/log/hadoop-yarn',
+      owner = 'yarn',
+      recursive = True,
+    )
+    self.assertResourceCalled('XmlConfig', 'core-site.xml',
+      owner = 'hdfs',
+      group = 'hadoop',
+      mode = 420,
+      conf_dir = '/etc/hadoop/conf',
+      configurations = self.getConfig()['configurations']['core-site'],
+    )
+    self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
+      owner = 'yarn',
+      group = 'hadoop',
+      mode = 420,
+      conf_dir = '/etc/hadoop/conf',
+      configurations = self.getConfig()['configurations']['mapred-site'],
+    )
+    self.assertResourceCalled('XmlConfig', 'yarn-site.xml',
+      owner = 'yarn',
+      group = 'hadoop',
+      mode = 420,
+      conf_dir = '/etc/hadoop/conf',
+      configurations = self.getConfig()['configurations']['yarn-site'],
+    )
+    self.assertResourceCalled('XmlConfig', 'capacity-scheduler.xml',
+      owner = 'yarn',
+      group = 'hadoop',
+      mode = 420,
+      conf_dir = '/etc/hadoop/conf',
+      configurations = self.getConfig()['configurations']['capacity-scheduler'],
+    )
+    self.assertResourceCalled('File', '/var/log/hadoop-yarn/yarn/hadoop-mapreduce.jobsummary.log',
+      owner = 'yarn',
+      group = 'hadoop',
+    )
+    self.assertResourceCalled('File', '/var/log/hadoop-mapreduce/mapred/hadoop-mapreduce.jobsummary.log',
+      owner = 'mapred',
+      group = 'hadoop',
+    )
+    self.assertResourceCalled('File', '/etc/security/limits.d/yarn.conf',
+      content = Template('yarn.conf.j2'),
+      mode = 420,
+    )
+    self.assertResourceCalled('File', '/etc/security/limits.d/mapreduce.conf',
+      content = Template('mapreduce.conf.j2'),
+      mode = 420,
+    )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/yarn-env.sh',
+      content = Template('yarn-env.sh.j2'),
+      owner = 'yarn',
+      group = 'hadoop',
+      mode = 493,
+    )
+
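+  # Same expectations as the default layout, plus the container-executor
+  # binary and container-executor.cfg that only the secured config manages.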
+  def assert_configure_secured(self):
+
+    self.assertResourceCalled('Directory', '/var/run/hadoop-yarn/yarn',
+      owner = 'yarn',
+      group = 'hadoop',
+      recursive = True,
+    )
+    self.assertResourceCalled('Directory', '/var/log/hadoop-yarn/yarn',
+      owner = 'yarn',
+      group = 'hadoop',
+      recursive = True,
+    )
+    self.assertResourceCalled('Directory', '/var/run/hadoop-mapreduce/mapred',
+      owner = 'mapred',
+      group = 'hadoop',
+      recursive = True,
+    )
+    self.assertResourceCalled('Directory', '/var/log/hadoop-mapreduce/mapred',
+      owner = 'mapred',
+      group = 'hadoop',
+      recursive = True,
+    )
+    self.assertResourceCalled('Directory', '/hadoop/yarn/local',
+      owner = 'yarn',
+      recursive = True,
+    )
+    self.assertResourceCalled('Directory', '/hadoop/yarn/log',
+      owner = 'yarn',
+      recursive = True,
+    )
+    self.assertResourceCalled('Directory', '/var/log/hadoop-yarn',
+      owner = 'yarn',
+      recursive = True,
+    )
+    self.assertResourceCalled('XmlConfig', 'core-site.xml',
+      owner = 'hdfs',
+      group = 'hadoop',
+      mode = 420,
+      conf_dir = '/etc/hadoop/conf',
+      configurations = self.getConfig()['configurations']['core-site'],
+    )
+    self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
+      owner = 'yarn',
+      group = 'hadoop',
+      mode = 420,
+      conf_dir = '/etc/hadoop/conf',
+      configurations = self.getConfig()['configurations']['mapred-site'],
+    )
+    self.assertResourceCalled('XmlConfig', 'yarn-site.xml',
+      owner = 'yarn',
+      group = 'hadoop',
+      mode = 420,
+      conf_dir = '/etc/hadoop/conf',
+      configurations = self.getConfig()['configurations']['yarn-site'],
+    )
+    self.assertResourceCalled('XmlConfig', 'capacity-scheduler.xml',
+      owner = 'yarn',
+      group = 'hadoop',
+      mode = 420,
+      conf_dir = '/etc/hadoop/conf',
+      configurations = self.getConfig()['configurations']['capacity-scheduler'],
+    )
+    self.assertResourceCalled('File', '/var/log/hadoop-yarn/yarn/hadoop-mapreduce.jobsummary.log',
+      owner = 'yarn',
+      group = 'hadoop',
+    )
+    self.assertResourceCalled('File', '/var/log/hadoop-mapreduce/mapred/hadoop-mapreduce.jobsummary.log',
+      owner = 'mapred',
+      group = 'hadoop',
+    )
+    self.assertResourceCalled('File', '/etc/security/limits.d/yarn.conf',
+      content = Template('yarn.conf.j2'),
+      mode = 420,
+    )
+    self.assertResourceCalled('File', '/etc/security/limits.d/mapreduce.conf',
+      content = Template('mapreduce.conf.j2'),
+      mode = 420,
+    )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/yarn-env.sh',
+      content = Template('yarn-env.sh.j2'),
+      owner = 'yarn',
+      group = 'hadoop',
+      mode = 493,
+    )
+    self.assertResourceCalled('File', '/usr/lib/hadoop-yarn/bin/container-executor',
+      group = 'hadoop',
+      mode = 3112,
+    )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/container-executor.cfg',
+      content = Template('container-executor.cfg.j2'),
+      group = 'hadoop',
+      mode = 420,
+    )
\ No newline at end of file

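Every test added in this patch follows the same RMFTestCase flow: executeScript() runs the real service script against a canned JSON configuration, the resources the script declares are captured for inspection rather than executed, and the test then replays them with assertResourceCalled() in declaration order before closing with assertNoMoreResources(). A minimal sketch of that flow is below; the script path, class name and expected resource are illustrative placeholders, not anything added by this commit.

from stacks.utils.RMFTestCase import *

class TestSomeDaemon(RMFTestCase):

  def test_configure_default(self):
    # Run the packaged script; declared resources are recorded, not executed.
    self.executeScript("2.1.1/services/SOME/package/scripts/some_daemon.py",
                       classname = "SomeDaemon",
                       command = "configure",
                       config_file="default.json"
    )
    # Assertions must appear in the same order the script declares resources.
    self.assertResourceCalled('Directory', '/var/run/some-daemon',
      owner = 'some_user',
      recursive = True,
    )
    # Fails if the script declared any resource not asserted above.
    self.assertNoMoreResources()
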
http://git-wip-us.apache.org/repos/asf/ambari/blob/2e34a088/ambari-server/src/test/python/stacks/2.1.1/YARN/test_yarn_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.1.1/YARN/test_yarn_client.py b/ambari-server/src/test/python/stacks/2.1.1/YARN/test_yarn_client.py
new file mode 100644
index 0000000..a956afc
--- /dev/null
+++ b/ambari-server/src/test/python/stacks/2.1.1/YARN/test_yarn_client.py
@@ -0,0 +1,215 @@
+#!/usr/bin/env python
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+from mock.mock import MagicMock, call, patch
+from stacks.utils.RMFTestCase import *
+
+class TestYarnClient(RMFTestCase):
+
+  def test_configure_default(self):
+    self.executeScript("2.1.1/services/YARN/package/scripts/yarn_client.py",
+                       classname = "YarnClient",
+                       command = "configure",
+                       config_file="default.json"
+    )
+
+    self.assertResourceCalled('Directory', '/var/run/hadoop-yarn/yarn',
+      owner = 'yarn',
+      group = 'hadoop',
+      recursive = True,
+    )
+    self.assertResourceCalled('Directory', '/var/log/hadoop-yarn/yarn',
+      owner = 'yarn',
+      group = 'hadoop',
+      recursive = True,
+    )
+    self.assertResourceCalled('Directory', '/var/run/hadoop-mapreduce/mapred',
+      owner = 'mapred',
+      group = 'hadoop',
+      recursive = True,
+    )
+    self.assertResourceCalled('Directory', '/var/log/hadoop-mapreduce/mapred',
+      owner = 'mapred',
+      group = 'hadoop',
+      recursive = True,
+    )
+    self.assertResourceCalled('Directory', '/hadoop/yarn/local',
+      owner = 'yarn',
+      recursive = True,
+    )
+    self.assertResourceCalled('Directory', '/hadoop/yarn/log',
+      owner = 'yarn',
+      recursive = True,
+    )
+    self.assertResourceCalled('Directory', '/var/log/hadoop-yarn',
+      owner = 'yarn',
+      recursive = True,
+    )
+    self.assertResourceCalled('XmlConfig', 'core-site.xml',
+      owner = 'hdfs',
+      group = 'hadoop',
+      mode = 420,
+      conf_dir = '/etc/hadoop/conf',
+      configurations = self.getConfig()['configurations']['core-site'],
+    )
+    self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
+      owner = 'yarn',
+      group = 'hadoop',
+      mode = 420,
+      conf_dir = '/etc/hadoop/conf',
+      configurations = self.getConfig()['configurations']['mapred-site'],
+    )
+    self.assertResourceCalled('XmlConfig', 'yarn-site.xml',
+      owner = 'yarn',
+      group = 'hadoop',
+      mode = 420,
+      conf_dir = '/etc/hadoop/conf',
+      configurations = self.getConfig()['configurations']['yarn-site'],
+    )
+    self.assertResourceCalled('XmlConfig', 'capacity-scheduler.xml',
+      owner = 'yarn',
+      group = 'hadoop',
+      mode = 420,
+      conf_dir = '/etc/hadoop/conf',
+      configurations = self.getConfig()['configurations']['capacity-scheduler'],
+    )
+    self.assertResourceCalled('File', '/var/log/hadoop-yarn/yarn/hadoop-mapreduce.jobsummary.log',
+      owner = 'yarn',
+      group = 'hadoop',
+    )
+    self.assertResourceCalled('File', '/var/log/hadoop-mapreduce/mapred/hadoop-mapreduce.jobsummary.log',
+      owner = 'mapred',
+      group = 'hadoop',
+    )
+    self.assertResourceCalled('File', '/etc/security/limits.d/yarn.conf',
+      content = Template('yarn.conf.j2'),
+      mode = 420,
+    )
+    self.assertResourceCalled('File', '/etc/security/limits.d/mapreduce.conf',
+      content = Template('mapreduce.conf.j2'),
+      mode = 420,
+    )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/yarn-env.sh',
+      content = Template('yarn-env.sh.j2'),
+      owner = 'yarn',
+      group = 'hadoop',
+      mode = 493,
+    )
+    self.assertNoMoreResources()
+
+  def test_configure_secured(self):
+
+    self.executeScript("2.1.1/services/YARN/package/scripts/yarn_client.py",
+                       classname = "YarnClient",
+                       command = "configure",
+                       config_file="secured.json"
+    )
+    self.assertResourceCalled('Directory', '/var/run/hadoop-yarn/yarn',
+      owner = 'yarn',
+      group = 'hadoop',
+      recursive = True,
+    )
+    self.assertResourceCalled('Directory', '/var/log/hadoop-yarn/yarn',
+      owner = 'yarn',
+      group = 'hadoop',
+      recursive = True,
+    )
+    self.assertResourceCalled('Directory', '/var/run/hadoop-mapreduce/mapred',
+      owner = 'mapred',
+      group = 'hadoop',
+      recursive = True,
+    )
+    self.assertResourceCalled('Directory', '/var/log/hadoop-mapreduce/mapred',
+      owner = 'mapred',
+      group = 'hadoop',
+      recursive = True,
+    )
+    self.assertResourceCalled('Directory', '/hadoop/yarn/local',
+      owner = 'yarn',
+      recursive = True,
+    )
+    self.assertResourceCalled('Directory', '/hadoop/yarn/log',
+      owner = 'yarn',
+      recursive = True,
+    )
+    self.assertResourceCalled('Directory', '/var/log/hadoop-yarn',
+      owner = 'yarn',
+      recursive = True,
+    )
+    self.assertResourceCalled('XmlConfig', 'core-site.xml',
+      owner = 'hdfs',
+      group = 'hadoop',
+      mode = 420,
+      conf_dir = '/etc/hadoop/conf',
+      configurations = self.getConfig()['configurations']['core-site'],
+    )
+    self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
+      owner = 'yarn',
+      group = 'hadoop',
+      mode = 420,
+      conf_dir = '/etc/hadoop/conf',
+      configurations = self.getConfig()['configurations']['mapred-site'],
+    )
+    self.assertResourceCalled('XmlConfig', 'yarn-site.xml',
+      owner = 'yarn',
+      group = 'hadoop',
+      mode = 420,
+      conf_dir = '/etc/hadoop/conf',
+      configurations = self.getConfig()['configurations']['yarn-site'],
+    )
+    self.assertResourceCalled('XmlConfig', 'capacity-scheduler.xml',
+      owner = 'yarn',
+      group = 'hadoop',
+      mode = 420,
+      conf_dir = '/etc/hadoop/conf',
+      configurations = self.getConfig()['configurations']['capacity-scheduler'],
+    )
+    self.assertResourceCalled('File', '/var/log/hadoop-yarn/yarn/hadoop-mapreduce.jobsummary.log',
+      owner = 'yarn',
+      group = 'hadoop',
+    )
+    self.assertResourceCalled('File', '/var/log/hadoop-mapreduce/mapred/hadoop-mapreduce.jobsummary.log',
+      owner = 'mapred',
+      group = 'hadoop',
+    )
+    self.assertResourceCalled('File', '/etc/security/limits.d/yarn.conf',
+      content = Template('yarn.conf.j2'),
+      mode = 420,
+    )
+    self.assertResourceCalled('File', '/etc/security/limits.d/mapreduce.conf',
+      content = Template('mapreduce.conf.j2'),
+      mode = 420,
+    )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/yarn-env.sh',
+      content = Template('yarn-env.sh.j2'),
+      owner = 'yarn',
+      group = 'hadoop',
+      mode = 493,
+    )
+    self.assertResourceCalled('File', '/usr/lib/hadoop-yarn/bin/container-executor',
+      group = 'hadoop',
+      mode = 3112,
+    )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/container-executor.cfg',
+      content = Template('container-executor.cfg.j2'),
+      group = 'hadoop',
+      mode = 420,
+    )
+    self.assertNoMoreResources()
+
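
Both YARN test modules above import MagicMock, call and patch without using them; the imports are harmless, though they could be dropped. When a command does need something stubbed out, for example a status check that looks for a pid file, a method-level @patch is the usual approach. The sketch below shows that shape only; the patch target, script path and class name are illustrative assumptions, not part of this commit.

from mock.mock import patch
from stacks.utils.RMFTestCase import *

class TestSomeDaemonStatus(RMFTestCase):

  @patch("os.path.exists")
  def test_status(self, exists_mock):
    # Pretend the pid file exists for this hypothetical status implementation.
    exists_mock.return_value = True
    self.executeScript("2.1.1/services/SOME/package/scripts/some_daemon.py",
                       classname = "SomeDaemon",
                       command = "status",
                       config_file="default.json"
    )
    # A status check should only inspect state, not declare new resources.
    self.assertNoMoreResources()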