Posted to commits@ambari.apache.org by ao...@apache.org on 2014/02/17 18:18:58 UTC

[4/5] git commit: AMBARI-4687. Write unittests for HDFS install script on HDP1 and HDP2 (Eugene Chekanskiy via aonishuk)

AMBARI-4687. Write unittests for HDFS install script on HDP1 and HDP2
(Eugene Chekanskiy via aonishuk)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/a5a02039
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/a5a02039
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/a5a02039

Branch: refs/heads/trunk
Commit: a5a0203967b1076d579c8237f7e343b5794b715e
Parents: 33bf1d4
Author: Andrew Onischuk <ao...@hortonworks.com>
Authored: Mon Feb 17 08:37:54 2014 -0800
Committer: Andrew Onischuk <ao...@hortonworks.com>
Committed: Mon Feb 17 09:18:37 2014 -0800

----------------------------------------------------------------------
 .../services/HDFS/package/scripts/datanode.py   |   4 +-
 .../services/HDFS/package/scripts/namenode.py   |   4 +-
 .../services/HDFS/package/scripts/namenode.py   |   8 +-
 .../services/HDFS/package/scripts/snamenode.py  |   4 +-
 .../services/HDFS/package/scripts/zkfc_slave.py |   4 +-
 .../python/stacks/1.3.2/HDFS/test_datanode.py   | 175 +++++++
 .../python/stacks/1.3.2/HDFS/test_namenode.py   | 227 ++++++++
 .../python/stacks/1.3.2/HDFS/test_snamenode.py  | 153 ++++++
 .../python/stacks/2.0.6/HDFS/test_datanode.py   | 168 ++++++
 .../stacks/2.0.6/HDFS/test_journalnode.py       | 155 ++++++
 .../python/stacks/2.0.6/HDFS/test_namenode.py   | 368 +++++++++++++
 .../python/stacks/2.0.6/HDFS/test_snamenode.py  | 156 ++++++
 .../test/python/stacks/2.0.6/HDFS/test_zkfc.py  | 121 +++++
 .../python/stacks/2.0.6/configs/ha_default.json | 437 ++++++++++++++++
 .../python/stacks/2.0.6/configs/ha_secured.json | 521 +++++++++++++++++++
 15 files changed, 2493 insertions(+), 12 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/a5a02039/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/datanode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/datanode.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/datanode.py
index eaa27cf..57fdb35 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/datanode.py
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/datanode.py
@@ -32,7 +32,7 @@ class DataNode(Script):
     import params
 
     env.set_params(params)
-    self.config(env)
+    self.configure(env)
     datanode(action="start")
 
   def stop(self, env):
@@ -41,7 +41,7 @@ class DataNode(Script):
     env.set_params(params)
     datanode(action="stop")
 
-  def config(self, env):
+  def configure(self, env):
     import params
 
     datanode(action="configure")

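The same config() -> configure() rename is applied to every service script below, so that start() can call the public configure(env) entry point. A minimal sketch of the resulting class shape, mirroring the hunks above (the resource_management import, the datanode() helper import, and the trailing execute() call are assumed from the surrounding package and are not shown in this diff):

    #!/usr/bin/env python
    # Sketch only: imports are assumed, not taken from this diff.
    from resource_management import Script
    from hdfs_datanode import datanode  # assumed helper module

    class DataNode(Script):
      def install(self, env):
        self.install_packages(env)

      def configure(self, env):  # renamed from config() in this commit
        import params
        env.set_params(params)
        datanode(action="configure")

      def start(self, env):
        import params
        env.set_params(params)
        self.configure(env)  # render configs before starting the daemon
        datanode(action="start")

      def stop(self, env):
        import params
        env.set_params(params)
        datanode(action="stop")

    if __name__ == "__main__":
      DataNode().execute()
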
http://git-wip-us.apache.org/repos/asf/ambari/blob/a5a02039/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/namenode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/namenode.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/namenode.py
index 80700c8..2f26c98 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/namenode.py
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/namenode.py
@@ -32,7 +32,7 @@ class NameNode(Script):
     import params
 
     env.set_params(params)
-    self.config(env)
+    self.configure(env)
     namenode(action="start")
 
   def stop(self, env):
@@ -41,7 +41,7 @@ class NameNode(Script):
     env.set_params(params)
     namenode(action="stop")
 
-  def config(self, env):
+  def configure(self, env):
     import params
 
     env.set_params(params)

http://git-wip-us.apache.org/repos/asf/ambari/blob/a5a02039/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/namenode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/namenode.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/namenode.py
index deb01d5..2179292 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/namenode.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/namenode.py
@@ -27,14 +27,14 @@ class NameNode(Script):
 
     self.install_packages(env)
     env.set_params(params)
-    #TODO remove when config action will be implemented
-    self.config(env)
+    #TODO we need this for HA because of manual steps
+    self.configure(env)
 
   def start(self, env):
     import params
 
     env.set_params(params)
-    self.config(env)
+    self.configure(env)
     namenode(action="start")
 
   def stop(self, env):
@@ -43,7 +43,7 @@ class NameNode(Script):
     env.set_params(params)
     namenode(action="stop")
 
-  def config(self, env):
+  def configure(self, env):
     import params
 
     env.set_params(params)

http://git-wip-us.apache.org/repos/asf/ambari/blob/a5a02039/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/snamenode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/snamenode.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/snamenode.py
index 8f682ec..b2a3bd1 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/snamenode.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/snamenode.py
@@ -35,7 +35,7 @@ class SNameNode(Script):
 
     env.set_params(params)
 
-    self.config(env)
+    self.configure(env)
     snamenode(action="start")
 
   def stop(self, env):
@@ -45,7 +45,7 @@ class SNameNode(Script):
 
     snamenode(action="stop")
 
-  def config(self, env):
+  def configure(self, env):
     import params
 
     env.set_params(params)

http://git-wip-us.apache.org/repos/asf/ambari/blob/a5a02039/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/zkfc_slave.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/zkfc_slave.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/zkfc_slave.py
index 1f9ba65..f415f24 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/zkfc_slave.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/zkfc_slave.py
@@ -32,7 +32,7 @@ class ZkfcSlave(Script):
     import params
 
     env.set_params(params)
-    self.config(env)
+    self.configure(env)
     service(
       action="start", name="zkfc", user=params.hdfs_user, create_pid_dir=True,
       create_log_dir=True
@@ -47,7 +47,7 @@ class ZkfcSlave(Script):
       create_log_dir=True
     )
 
-  def config(self, env):
+  def configure(self, env):
     pass
 
   def status(self, env):

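The new test files that follow all use the same RMFTestCase pattern: executeScript() runs one script command against a canned JSON config, every resource the script would create is asserted in order, and assertNoMoreResources() fails the test if anything was left unasserted. A minimal sketch of one such test, using only the helpers visible in the files below:

    # Sketch of the RMFTestCase pattern shared by all the new tests.
    from stacks.utils.RMFTestCase import *

    class TestDatanode(RMFTestCase):
      def test_configure_default(self):
        # Run the script's "configure" command with a canned config file.
        self.executeScript("1.3.2/services/HDFS/package/scripts/datanode.py",
                           classname = "DataNode",
                           command = "configure",
                           config_file="default.json")
        # Assert each declared resource, in declaration order...
        self.assertResourceCalled('Directory', '/var/lib/hadoop-hdfs',
                                  owner = 'hdfs',
                                  group = 'hadoop',
                                  mode = 0750,
                                  recursive = True)
        # ...and verify nothing unasserted remains.
        self.assertNoMoreResources()
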
http://git-wip-us.apache.org/repos/asf/ambari/blob/a5a02039/ambari-server/src/test/python/stacks/1.3.2/HDFS/test_datanode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/1.3.2/HDFS/test_datanode.py b/ambari-server/src/test/python/stacks/1.3.2/HDFS/test_datanode.py
new file mode 100644
index 0000000..42b9fe0
--- /dev/null
+++ b/ambari-server/src/test/python/stacks/1.3.2/HDFS/test_datanode.py
@@ -0,0 +1,175 @@
+#!/usr/bin/env python
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+from mock.mock import MagicMock, patch
+from stacks.utils.RMFTestCase import *
+
+
+class TestDatanode(RMFTestCase):
+
+  def test_configure_default(self):
+    self.executeScript("1.3.2/services/HDFS/package/scripts/datanode.py",
+                       classname = "DataNode",
+                       command = "configure",
+                       config_file="default.json"
+    )
+    self.assert_configure_default()
+    self.assertNoMoreResources()
+
+  def test_start_default(self):
+    self.executeScript("1.3.2/services/HDFS/package/scripts/datanode.py",
+                       classname = "DataNode",
+                       command = "start",
+                       config_file="default.json"
+    )
+    self.assert_configure_default()
+    self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
+                              owner = 'hdfs',
+                              recursive = True,
+                              )
+    self.assertResourceCalled('Directory', '/var/log/hadoop/hdfs',
+                              owner = 'hdfs',
+                              recursive = True,
+                              )
+    self.assertResourceCalled('Execute', 'true',
+                              )
+    self.assertResourceCalled('Execute', 'ulimit -c unlimited;  export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/bin/hadoop-daemon.sh --config /etc/hadoop/conf start datanode',
+                              not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid >/dev/null 2>&1 && ps `cat /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid` >/dev/null 2>&1',
+                              user = 'hdfs',
+                              )
+    self.assertNoMoreResources()
+
+  def test_stop_default(self):
+    self.executeScript("1.3.2/services/HDFS/package/scripts/datanode.py",
+                       classname = "DataNode",
+                       command = "stop",
+                       config_file="default.json"
+    )
+    self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
+                              owner = 'hdfs',
+                              recursive = True,
+                              )
+    self.assertResourceCalled('Directory', '/var/log/hadoop/hdfs',
+                              owner = 'hdfs',
+                              recursive = True,
+                              )
+    self.assertResourceCalled('Execute', 'true',
+                              )
+    self.assertResourceCalled('Execute', 'ulimit -c unlimited;  export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/bin/hadoop-daemon.sh --config /etc/hadoop/conf stop datanode',
+                              not_if = None,
+                              user = 'hdfs',
+                              )
+    self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid',
+                              action = ['delete'],
+                              ignore_failures = True,
+                              )
+    self.assertNoMoreResources()
+
+  def test_configure_secured(self):
+    self.executeScript("1.3.2/services/HDFS/package/scripts/datanode.py",
+                       classname = "DataNode",
+                       command = "configure",
+                       config_file="secured.json"
+    )
+    self.assert_configure_secured()
+    self.assertNoMoreResources()
+
+  def test_start_secured(self):
+    self.executeScript("1.3.2/services/HDFS/package/scripts/datanode.py",
+                       classname = "DataNode",
+                       command = "start",
+                       config_file="secured.json"
+    )
+    self.assert_configure_secured()
+    self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
+                              owner = 'hdfs',
+                              recursive = True,
+                              )
+    self.assertResourceCalled('Directory', '/var/log/hadoop/hdfs',
+                              owner = 'hdfs',
+                              recursive = True,
+                              )
+    self.assertResourceCalled('Execute', 'kinit -kt /etc/security/keytabs/dn.service.keytab dn/c6402.ambari.apache.org@EXAMPLE.COM',
+                              )
+    self.assertResourceCalled('Execute', 'ulimit -c unlimited;  export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/bin/hadoop-daemon.sh --config /etc/hadoop/conf start datanode',
+                              not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid >/dev/null 2>&1 && ps `cat /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid` >/dev/null 2>&1',
+                              user = 'root',
+                              )
+
+  def test_stop_secured(self):
+    self.executeScript("1.3.2/services/HDFS/package/scripts/datanode.py",
+                       classname = "DataNode",
+                       command = "stop",
+                       config_file="secured.json"
+    )
+    self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
+                              owner = 'hdfs',
+                              recursive = True,
+                              )
+    self.assertResourceCalled('Directory', '/var/log/hadoop/hdfs',
+                              owner = 'hdfs',
+                              recursive = True,
+                              )
+    self.assertResourceCalled('Execute', 'kinit -kt /etc/security/keytabs/dn.service.keytab dn/c6402.ambari.apache.org@EXAMPLE.COM',
+                              )
+    self.assertResourceCalled('Execute', 'ulimit -c unlimited;  export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/bin/hadoop-daemon.sh --config /etc/hadoop/conf stop datanode',
+                              not_if = None,
+                              user = 'root',
+                              )
+    self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid',
+                              action = ['delete'],
+                              ignore_failures = True,
+                              )
+    self.assertNoMoreResources()
+
+  def assert_configure_default(self):
+    self.assertResourceCalled('Directory', '/var/lib/hadoop-hdfs',
+                              owner = 'hdfs',
+                              group = 'hadoop',
+                              mode = 0750,
+                              recursive = True,
+                              )
+    self.assertResourceCalled('Directory', '/hadoop/hdfs',
+                              mode = 0755,
+                              recursive = True,
+                              )
+    self.assertResourceCalled('Directory', '/hadoop/hdfs/data',
+                              owner = 'hdfs',
+                              group = 'hadoop',
+                              mode = 0750,
+                              recursive = False,
+                              )
+
+  def assert_configure_secured(self):
+    self.assertResourceCalled('Directory', '/var/lib/hadoop-hdfs',
+                              owner = 'hdfs',
+                              group = 'hadoop',
+                              mode = 0750,
+                              recursive = True,
+                              )
+    self.assertResourceCalled('Directory', '/hadoop/hdfs',
+                              mode = 0755,
+                              recursive = True,
+                              )
+    self.assertResourceCalled('Directory', '/hadoop/hdfs/data',
+                              owner = 'hdfs',
+                              group = 'hadoop',
+                              mode = 0750,
+                              recursive = False,
+                              )
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/a5a02039/ambari-server/src/test/python/stacks/1.3.2/HDFS/test_namenode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/1.3.2/HDFS/test_namenode.py b/ambari-server/src/test/python/stacks/1.3.2/HDFS/test_namenode.py
new file mode 100644
index 0000000..0bed3d6
--- /dev/null
+++ b/ambari-server/src/test/python/stacks/1.3.2/HDFS/test_namenode.py
@@ -0,0 +1,227 @@
+#!/usr/bin/env python
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+from mock.mock import MagicMock, patch
+from stacks.utils.RMFTestCase import *
+
+
+class TestNamenode(RMFTestCase):
+
+  def test_configure_default(self):
+    self.executeScript("1.3.2/services/HDFS/package/scripts/namenode.py",
+                       classname = "NameNode",
+                       command = "configure",
+                       config_file="default.json"
+    )
+    self.assert_configure_default()
+    self.assertNoMoreResources()
+
+  def test_start_default(self):
+    self.executeScript("1.3.2/services/HDFS/package/scripts/namenode.py",
+                       classname = "NameNode",
+                       command = "start",
+                       config_file="default.json"
+    )
+    self.assert_configure_default()
+    self.assertResourceCalled('File', '/tmp/checkForFormat.sh',
+                              content = StaticFile('checkForFormat.sh'),
+                              mode = 0755,
+                              )
+    self.assertResourceCalled('Execute', 'sh /tmp/checkForFormat.sh hdfs /etc/hadoop/conf /var/run/hadoop/hdfs/namenode/formatted/ /hadoop/hdfs/namenode',
+                              path = ['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'],
+                              not_if = 'test -d /var/run/hadoop/hdfs/namenode/formatted/',
+                              )
+    self.assertResourceCalled('Execute', 'mkdir -p /var/run/hadoop/hdfs/namenode/formatted/',
+                              )
+    self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
+                              owner = 'hdfs',
+                              recursive = True,
+                              )
+    self.assertResourceCalled('Directory', '/var/log/hadoop/hdfs',
+                              owner = 'hdfs',
+                              recursive = True,
+                              )
+    self.assertResourceCalled('Execute', 'true',
+                              )
+    self.assertResourceCalled('Execute', 'ulimit -c unlimited;  export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/bin/hadoop-daemon.sh --config /etc/hadoop/conf start namenode',
+                              not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid >/dev/null 2>&1 && ps `cat /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid` >/dev/null 2>&1',
+                              user = 'hdfs',
+                              )
+    self.assertResourceCalled('Execute', "su - hdfs -c 'hadoop dfsadmin -safemode get' | grep 'Safe mode is OFF'",
+                              tries = 40,
+                              try_sleep = 10,
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/tmp',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0777,
+                              owner = 'hdfs',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/user/ambari-qa',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0770,
+                              owner = 'ambari-qa',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', None,
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              action = ['create'],
+                              )
+    self.assertNoMoreResources()
+
+  def test_stop_default(self):
+    self.executeScript("1.3.2/services/HDFS/package/scripts/namenode.py",
+                       classname = "NameNode",
+                       command = "stop",
+                       config_file="default.json"
+    )
+    self.assertResourceCalled('Execute', 'true',
+                              )
+    self.assertResourceCalled('Execute', 'ulimit -c unlimited;  export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/bin/hadoop-daemon.sh --config /etc/hadoop/conf stop namenode',
+                              not_if = None,
+                              user = 'hdfs',
+                              )
+    self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid',
+                              action = ['delete'],
+                              ignore_failures = True,
+                              )
+    self.assertNoMoreResources()
+
+  def test_configure_secured(self):
+    self.executeScript("1.3.2/services/HDFS/package/scripts/namenode.py",
+                       classname = "NameNode",
+                       command = "configure",
+                       config_file="secured.json"
+    )
+    self.assert_configure_secured()
+    self.assertNoMoreResources()
+
+  def test_start_secured(self):
+    self.executeScript("1.3.2/services/HDFS/package/scripts/namenode.py",
+                       classname = "NameNode",
+                       command = "start",
+                       config_file="secured.json"
+    )
+    self.assert_configure_secured()
+    self.assertResourceCalled('File', '/tmp/checkForFormat.sh',
+                              content = StaticFile('checkForFormat.sh'),
+                              mode = 0755,
+                              )
+    self.assertResourceCalled('Execute', 'sh /tmp/checkForFormat.sh hdfs /etc/hadoop/conf /var/run/hadoop/hdfs/namenode/formatted/ /hadoop/hdfs/namenode',
+                              path = ['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'],
+                              not_if = 'test -d /var/run/hadoop/hdfs/namenode/formatted/',
+                              )
+    self.assertResourceCalled('Execute', 'mkdir -p /var/run/hadoop/hdfs/namenode/formatted/',
+                              )
+    self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
+                              owner = 'hdfs',
+                              recursive = True,
+                              )
+    self.assertResourceCalled('Directory', '/var/log/hadoop/hdfs',
+                              owner = 'hdfs',
+                              recursive = True,
+                              )
+    self.assertResourceCalled('Execute', 'kinit -kt /etc/security/keytabs/nn.service.keytab nn/c6402.ambari.apache.org@EXAMPLE.COM',
+                              )
+    self.assertResourceCalled('Execute', 'ulimit -c unlimited;  export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/bin/hadoop-daemon.sh --config /etc/hadoop/conf start namenode',
+                              not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid >/dev/null 2>&1 && ps `cat /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid` >/dev/null 2>&1',
+                              user = 'hdfs',
+                              )
+    self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/hdfs.headless.keytab hdfs',
+                              user = 'hdfs',
+                              )
+    self.assertResourceCalled('Execute', "su - hdfs -c 'hadoop dfsadmin -safemode get' | grep 'Safe mode is OFF'",
+                              tries = 40,
+                              try_sleep = 10,
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/tmp',
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0777,
+                              owner = 'hdfs',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/user/ambari-qa',
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0770,
+                              owner = 'ambari-qa',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', None,
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              action = ['create'],
+                              )
+    self.assertNoMoreResources()
+
+  def test_stop_secured(self):
+    self.executeScript("1.3.2/services/HDFS/package/scripts/namenode.py",
+                       classname = "NameNode",
+                       command = "stop",
+                       config_file="secured.json"
+    )
+    self.assertResourceCalled('Execute', 'kinit -kt /etc/security/keytabs/nn.service.keytab nn/c6402.ambari.apache.org@EXAMPLE.COM',
+                              )
+    self.assertResourceCalled('Execute', 'ulimit -c unlimited;  export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/bin/hadoop-daemon.sh --config /etc/hadoop/conf stop namenode',
+                              not_if = None,
+                              user = 'hdfs',
+                              )
+    self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid',
+                              action = ['delete'],
+                              ignore_failures = True,
+                              )
+    self.assertNoMoreResources()
+
+  def assert_configure_default(self):
+    self.assertResourceCalled('Directory', '/hadoop/hdfs/namenode',
+                              owner = 'hdfs',
+                              group = 'hadoop',
+                              recursive = True,
+                              mode = 0755,
+                              )
+
+  def assert_configure_secured(self):
+    self.assertResourceCalled('Directory', '/hadoop/hdfs/namenode',
+                              owner = 'hdfs',
+                              group = 'hadoop',
+                              recursive = True,
+                              mode = 0755,
+                              )
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/a5a02039/ambari-server/src/test/python/stacks/1.3.2/HDFS/test_snamenode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/1.3.2/HDFS/test_snamenode.py b/ambari-server/src/test/python/stacks/1.3.2/HDFS/test_snamenode.py
new file mode 100644
index 0000000..9436264
--- /dev/null
+++ b/ambari-server/src/test/python/stacks/1.3.2/HDFS/test_snamenode.py
@@ -0,0 +1,153 @@
+#!/usr/bin/env python
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+from mock.mock import MagicMock, patch
+from stacks.utils.RMFTestCase import *
+
+
+class TestSNamenode(RMFTestCase):
+
+  def test_configure_default(self):
+    self.executeScript("1.3.2/services/HDFS/package/scripts/snamenode.py",
+                       classname = "SNameNode",
+                       command = "configure",
+                       config_file="default.json"
+    )
+    self.assert_configure_default()
+    self.assertNoMoreResources()
+
+  def test_start_default(self):
+    self.executeScript("1.3.2/services/HDFS/package/scripts/snamenode.py",
+                       classname = "SNameNode",
+                       command = "start",
+                       config_file="default.json"
+    )
+    self.assert_configure_default()
+    self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
+                              owner = 'hdfs',
+                              recursive = True,
+                              )
+    self.assertResourceCalled('Directory', '/var/log/hadoop/hdfs',
+                              owner = 'hdfs',
+                              recursive = True,
+                              )
+    self.assertResourceCalled('Execute', 'true',
+                              )
+    self.assertResourceCalled('Execute', 'ulimit -c unlimited;  export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/bin/hadoop-daemon.sh --config /etc/hadoop/conf start secondarynamenode',
+                              not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-secondarynamenode.pid >/dev/null 2>&1 && ps `cat /var/run/hadoop/hdfs/hadoop-hdfs-secondarynamenode.pid` >/dev/null 2>&1',
+                              user = 'hdfs',
+                              )
+    self.assertNoMoreResources()
+
+  def test_stop_default(self):
+    self.executeScript("1.3.2/services/HDFS/package/scripts/snamenode.py",
+                       classname = "SNameNode",
+                       command = "stop",
+                       config_file="default.json"
+    )
+    self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
+                              owner = 'hdfs',
+                              recursive = True,
+                              )
+    self.assertResourceCalled('Directory', '/var/log/hadoop/hdfs',
+                              owner = 'hdfs',
+                              recursive = True,
+                              )
+    self.assertResourceCalled('Execute', 'true',
+                              )
+    self.assertResourceCalled('Execute', 'ulimit -c unlimited;  export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/bin/hadoop-daemon.sh --config /etc/hadoop/conf stop secondarynamenode',
+                              not_if = None,
+                              user = 'hdfs',
+                              )
+    self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-secondarynamenode.pid',
+                              action = ['delete'],
+                              ignore_failures = True,
+                              )
+
+  def test_configure_secured(self):
+    self.executeScript("1.3.2/services/HDFS/package/scripts/snamenode.py",
+                       classname = "SNameNode",
+                       command = "configure",
+                       config_file="secured.json"
+    )
+    self.assert_configure_secured()
+    self.assertNoMoreResources()
+
+  def test_start_secured(self):
+    self.executeScript("1.3.2/services/HDFS/package/scripts/snamenode.py",
+                       classname = "SNameNode",
+                       command = "start",
+                       config_file="secured.json"
+    )
+    self.assert_configure_secured()
+    self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
+                              owner = 'hdfs',
+                              recursive = True,
+                              )
+    self.assertResourceCalled('Directory', '/var/log/hadoop/hdfs',
+                              owner = 'hdfs',
+                              recursive = True,
+                              )
+    self.assertResourceCalled('Execute', 'kinit -kt /etc/security/keytabs/nn.service.keytab nn/c6402.ambari.apache.org@EXAMPLE.COM',
+                              )
+    self.assertResourceCalled('Execute', 'ulimit -c unlimited;  export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/bin/hadoop-daemon.sh --config /etc/hadoop/conf start secondarynamenode',
+                              not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-secondarynamenode.pid >/dev/null 2>&1 && ps `cat /var/run/hadoop/hdfs/hadoop-hdfs-secondarynamenode.pid` >/dev/null 2>&1',
+                              user = 'hdfs',
+                              )
+
+  def test_stop_secured(self):
+    self.executeScript("1.3.2/services/HDFS/package/scripts/snamenode.py",
+                       classname = "SNameNode",
+                       command = "stop",
+                       config_file="secured.json"
+    )
+    self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
+                              owner = 'hdfs',
+                              recursive = True,
+                              )
+    self.assertResourceCalled('Directory', '/var/log/hadoop/hdfs',
+                              owner = 'hdfs',
+                              recursive = True,
+                              )
+    self.assertResourceCalled('Execute', 'kinit -kt /etc/security/keytabs/nn.service.keytab nn/c6402.ambari.apache.org@EXAMPLE.COM',
+                              )
+    self.assertResourceCalled('Execute', 'ulimit -c unlimited;  export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/bin/hadoop-daemon.sh --config /etc/hadoop/conf stop secondarynamenode',
+                              not_if = None,
+                              user = 'hdfs',
+                              )
+    self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-secondarynamenode.pid',
+                              action = ['delete'],
+                              ignore_failures = True,
+                              )
+
+  def assert_configure_default(self):
+    self.assertResourceCalled('Directory', '/hadoop/hdfs/namesecondary',
+                              owner = 'hdfs',
+                              group = 'hadoop',
+                              mode = 0755,
+                              recursive = True,
+                              )
+
+  def assert_configure_secured(self):
+    self.assertResourceCalled('Directory', '/hadoop/hdfs/namesecondary',
+                              owner = 'hdfs',
+                              group = 'hadoop',
+                              mode = 0755,
+                              recursive = True,
+                              )
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/a5a02039/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py
new file mode 100644
index 0000000..838c53a
--- /dev/null
+++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py
@@ -0,0 +1,168 @@
+#!/usr/bin/env python
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+from mock.mock import MagicMock, patch
+from stacks.utils.RMFTestCase import *
+
+
+class TestDatanode(RMFTestCase):
+
+  def test_configure_default(self):
+    self.executeScript("2.0.6/services/HDFS/package/scripts/datanode.py",
+                       classname = "DataNode",
+                       command = "configure",
+                       config_file="default.json"
+    )
+    self.assert_configure_default()
+    self.assertNoMoreResources()
+
+  def test_start_default(self):
+    self.executeScript("2.0.6/services/HDFS/package/scripts/datanode.py",
+                       classname = "DataNode",
+                       command = "start",
+                       config_file="default.json"
+    )
+    self.assert_configure_default()
+    self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
+                              owner = 'hdfs',
+                              recursive = True,
+                              )
+    self.assertResourceCalled('Directory', '/var/log/hadoop/hdfs',
+                              owner = 'hdfs',
+                              recursive = True,
+                              )
+    self.assertResourceCalled('Execute', 'true',
+                              )
+    self.assertResourceCalled('Execute', 'ulimit -c unlimited;  export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start datanode',
+                              not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid >/dev/null 2>&1 && ps `cat /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid` >/dev/null 2>&1',
+                              user = 'hdfs',
+                              )
+    self.assertNoMoreResources()
+
+  def test_stop_default(self):
+    self.executeScript("2.0.6/services/HDFS/package/scripts/datanode.py",
+                       classname = "DataNode",
+                       command = "stop",
+                       config_file="default.json"
+    )
+    self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
+                              owner = 'hdfs',
+                              recursive = True,
+                              )
+    self.assertResourceCalled('Directory', '/var/log/hadoop/hdfs',
+                              owner = 'hdfs',
+                              recursive = True,
+                              )
+    self.assertResourceCalled('Execute', 'true',
+                              )
+    self.assertResourceCalled('Execute', 'ulimit -c unlimited;  export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf stop datanode',
+                              not_if = None,
+                              user = 'hdfs',
+                              )
+    self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid',
+                              action = ['delete'],
+                              ignore_failures = True,
+                              )
+    self.assertNoMoreResources()
+
+  def test_configure_secured(self):
+    self.executeScript("2.0.6/services/HDFS/package/scripts/datanode.py",
+                       classname = "DataNode",
+                       command = "configure",
+                       config_file="secured.json"
+    )
+    self.assert_configure_secured()
+    self.assertNoMoreResources()
+
+  def test_start_secured(self):
+    self.executeScript("2.0.6/services/HDFS/package/scripts/datanode.py",
+                       classname = "DataNode",
+                       command = "start",
+                       config_file="secured.json"
+    )
+    self.assert_configure_secured()
+    self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
+                              owner = 'hdfs',
+                              recursive = True,
+                              )
+    self.assertResourceCalled('Directory', '/var/log/hadoop/hdfs',
+                              owner = 'hdfs',
+                              recursive = True,
+                              )
+    self.assertResourceCalled('Execute', 'kinit -kt /etc/security/keytabs/dn.service.keytab dn/c6401.ambari.apache.org@EXAMPLE.COM',
+                              )
+    self.assertResourceCalled('Execute', 'ulimit -c unlimited;  export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start datanode',
+                              not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid >/dev/null 2>&1 && ps `cat /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid` >/dev/null 2>&1',
+                              user = 'root',
+                              )
+    self.assertNoMoreResources()
+
+  def test_stop_secured(self):
+    self.executeScript("2.0.6/services/HDFS/package/scripts/datanode.py",
+                       classname = "DataNode",
+                       command = "stop",
+                       config_file="secured.json"
+    )
+    self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
+                              owner = 'hdfs',
+                              recursive = True,
+                              )
+    self.assertResourceCalled('Directory', '/var/log/hadoop/hdfs',
+                              owner = 'hdfs',
+                              recursive = True,
+                              )
+    self.assertResourceCalled('Execute', 'kinit -kt /etc/security/keytabs/dn.service.keytab dn/c6401.ambari.apache.org@EXAMPLE.COM',
+                              )
+    self.assertResourceCalled('Execute', 'ulimit -c unlimited;  export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf stop datanode',
+                              not_if = None,
+                              user = 'root',
+                              )
+    self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid',
+                              action = ['delete'],
+                              ignore_failures = True,
+                              )
+    self.assertNoMoreResources()
+
+  def assert_configure_default(self):
+    self.assertResourceCalled('Directory', '/var/lib/hadoop-hdfs',
+                              owner = 'hdfs',
+                              group = 'hadoop',
+                              mode = 0750,
+                              recursive = True,
+                              )
+    self.assertResourceCalled('Directory', '/hadoop/hdfs/data',
+                              owner = 'hdfs',
+                              group = 'hadoop',
+                              mode = 0755,
+                              recursive = True,
+                              )
+
+  def assert_configure_secured(self):
+    self.assertResourceCalled('Directory', '/var/lib/hadoop-hdfs',
+                              owner = 'hdfs',
+                              group = 'hadoop',
+                              mode = 0750,
+                              recursive = True,
+                              )
+    self.assertResourceCalled('Directory', '/hadoop/hdfs/data',
+                              owner = 'hdfs',
+                              group = 'hadoop',
+                              mode = 0755,
+                              recursive = True,
+                              )
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/a5a02039/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_journalnode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_journalnode.py b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_journalnode.py
new file mode 100644
index 0000000..f453a6a
--- /dev/null
+++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_journalnode.py
@@ -0,0 +1,155 @@
+#!/usr/bin/env python
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+from mock.mock import MagicMock, patch
+from stacks.utils.RMFTestCase import *
+
+
+class TestJournalnode(RMFTestCase):
+
+  def test_configure_default(self):
+    self.executeScript("2.0.6/services/HDFS/package/scripts/journalnode.py",
+                       classname = "JournalNode",
+                       command = "configure",
+                       config_file="default.json"
+    )
+    self.assert_configure_default()
+    self.assertNoMoreResources()
+
+  def test_start_default(self):
+    self.executeScript("2.0.6/services/HDFS/package/scripts/journalnode.py",
+                       classname = "JournalNode",
+                       command = "start",
+                       config_file="default.json"
+    )
+    self.assert_configure_default()
+    self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
+                              owner = 'hdfs',
+                              recursive = True,
+                              )
+    self.assertResourceCalled('Directory', '/var/log/hadoop/hdfs',
+                              owner = 'hdfs',
+                              recursive = True,
+                              )
+    self.assertResourceCalled('Execute', 'true',
+                              )
+    self.assertResourceCalled('Execute', 'ulimit -c unlimited;  export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start journalnode',
+                              not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-journalnode.pid >/dev/null 2>&1 && ps `cat /var/run/hadoop/hdfs/hadoop-hdfs-journalnode.pid` >/dev/null 2>&1',
+                              user = 'hdfs',
+                              )
+    self.assertNoMoreResources()
+
+  def test_stop_default(self):
+    self.executeScript("2.0.6/services/HDFS/package/scripts/journalnode.py",
+                       classname = "JournalNode",
+                       command = "stop",
+                       config_file="default.json"
+    )
+    self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
+                              owner = 'hdfs',
+                              recursive = True,
+                              )
+    self.assertResourceCalled('Directory', '/var/log/hadoop/hdfs',
+                              owner = 'hdfs',
+                              recursive = True,
+                              )
+    self.assertResourceCalled('Execute', 'true',
+                              )
+    self.assertResourceCalled('Execute', 'ulimit -c unlimited;  export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf stop journalnode',
+                              not_if = None,
+                              user = 'hdfs',
+                              )
+    self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-journalnode.pid',
+                              action = ['delete'],
+                              ignore_failures = True,
+                              )
+    self.assertNoMoreResources()
+
+  def test_configure_secured(self):
+    self.executeScript("2.0.6/services/HDFS/package/scripts/journalnode.py",
+                       classname = "JournalNode",
+                       command = "configure",
+                       config_file="secured.json"
+    )
+    self.assert_configure_secured()
+    self.assertNoMoreResources()
+
+  def test_start_secured(self):
+    self.executeScript("2.0.6/services/HDFS/package/scripts/journalnode.py",
+                       classname = "JournalNode",
+                       command = "start",
+                       config_file="secured.json"
+    )
+    self.assert_configure_secured()
+    self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
+                              owner = 'hdfs',
+                              recursive = True,
+                              )
+    self.assertResourceCalled('Directory', '/var/log/hadoop/hdfs',
+                              owner = 'hdfs',
+                              recursive = True,
+                              )
+    self.assertResourceCalled('Execute', 'kinit -kt /etc/security/keytabs/jn.service.keytab jn/c6401.ambari.apache.org@EXAMPLE.COM',
+                              )
+    self.assertResourceCalled('Execute', 'ulimit -c unlimited;  export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start journalnode',
+                              not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-journalnode.pid >/dev/null 2>&1 && ps `cat /var/run/hadoop/hdfs/hadoop-hdfs-journalnode.pid` >/dev/null 2>&1',
+                              user = 'hdfs',
+                              )
+    self.assertNoMoreResources()
+
+  def test_stop_secured(self):
+    self.executeScript("2.0.6/services/HDFS/package/scripts/journalnode.py",
+                       classname = "JournalNode",
+                       command = "stop",
+                       config_file="secured.json"
+    )
+    self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
+                              owner = 'hdfs',
+                              recursive = True,
+                              )
+    self.assertResourceCalled('Directory', '/var/log/hadoop/hdfs',
+                              owner = 'hdfs',
+                              recursive = True,
+                              )
+    self.assertResourceCalled('Execute', 'kinit -kt /etc/security/keytabs/jn.service.keytab jn/c6401.ambari.apache.org@EXAMPLE.COM',
+                              )
+    self.assertResourceCalled('Execute', 'ulimit -c unlimited;  export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf stop journalnode',
+                              not_if = None,
+                              user = 'hdfs',
+                              )
+    self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-journalnode.pid',
+                              action = ['delete'],
+                              ignore_failures = True,
+                              )
+    self.assertNoMoreResources()
+
+  def assert_configure_default(self):
+    self.assertResourceCalled('Directory', '/grid/0/hdfs/journal',
+                              owner = 'hdfs',
+                              group = 'hadoop',
+                              recursive = True,
+                              )
+
+
+  def assert_configure_secured(self):
+    self.assertResourceCalled('Directory', '/grid/0/hdfs/journal',
+                              owner = 'hdfs',
+                              group = 'hadoop',
+                              recursive = True,
+                              )
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/a5a02039/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py
new file mode 100644
index 0000000..a91d3f9
--- /dev/null
+++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py
@@ -0,0 +1,368 @@
+#!/usr/bin/env python
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+from mock.mock import MagicMock, patch
+from stacks.utils.RMFTestCase import *
+
+
+class TestNamenode(RMFTestCase):
+
+  def test_configure_default(self):
+    self.executeScript("2.0.6/services/HDFS/package/scripts/namenode.py",
+                       classname = "NameNode",
+                       command = "configure",
+                       config_file="default.json"
+    )
+    self.assert_configure_default()
+    self.assertNoMoreResources()
+
+  def test_start_default(self):
+    self.executeScript("2.0.6/services/HDFS/package/scripts/namenode.py",
+                       classname = "NameNode",
+                       command = "start",
+                       config_file="default.json"
+    )
+    self.assert_configure_default()
+    self.assertResourceCalled('File', '/tmp/checkForFormat.sh',
+                              content = StaticFile('checkForFormat.sh'),
+                              mode = 0755,
+                              )
+    self.assertResourceCalled('Execute', 'sh /tmp/checkForFormat.sh hdfs /etc/hadoop/conf /var/run/hadoop/hdfs/namenode/formatted/ /hadoop/hdfs/namenode',
+                              path = ['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'],
+                              not_if = 'test -d /var/run/hadoop/hdfs/namenode/formatted/',
+                              )
+    self.assertResourceCalled('Execute', 'mkdir -p /var/run/hadoop/hdfs/namenode/formatted/',
+                              )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/dfs.exclude',
+                              owner = 'hdfs',
+                              content = Template('exclude_hosts_list.j2'),
+                              group = 'hadoop',
+                              )
+    self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
+                              owner = 'hdfs',
+                              recursive = True,
+                              )
+    self.assertResourceCalled('Directory', '/var/log/hadoop/hdfs',
+                              owner = 'hdfs',
+                              recursive = True,
+                              )
+    self.assertResourceCalled('Execute', 'true',
+                              )
+    self.assertResourceCalled('Execute', 'ulimit -c unlimited;  export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start namenode',
+                              not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid >/dev/null 2>&1 && ps `cat /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid` >/dev/null 2>&1',
+                              user = 'hdfs',
+                              )
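+    # Once the daemon is up, the script polls 'dfsadmin -safemode get' for
+    # "Safe mode is OFF", up to 40 tries with a 10-second sleep; outside HA
+    # the only_if guard is None, so the poll always runs.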
+    self.assertResourceCalled('Execute', "su - hdfs -c 'hadoop dfsadmin -safemode get' | grep 'Safe mode is OFF'",
+                              tries = 40,
+                              only_if = None,
+                              try_sleep = 10,
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/tmp',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0777,
+                              owner = 'hdfs',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/user/ambari-qa',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0770,
+                              owner = 'ambari-qa',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', None,
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              action = ['create'],
+                              only_if = None,
+                              )
+    self.assertNoMoreResources()
+
+  def test_stop_default(self):
+    self.executeScript("2.0.6/services/HDFS/package/scripts/namenode.py",
+                       classname = "NameNode",
+                       command = "stop",
+                       config_file="default.json"
+    )
+    self.assertResourceCalled('Execute', 'true',
+                              )
+    self.assertResourceCalled('Execute', 'ulimit -c unlimited;  export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf stop namenode',
+                              not_if = None,
+                              user = 'hdfs',
+                              )
+    self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid',
+                              action = ['delete'],
+                              ignore_failures = True,
+                              )
+    self.assertNoMoreResources()
+
+  def test_configure_secured(self):
+    self.executeScript("2.0.6/services/HDFS/package/scripts/namenode.py",
+                       classname = "NameNode",
+                       command = "configure",
+                       config_file="secured.json"
+    )
+    self.assert_configure_secured()
+    self.assertNoMoreResources()
+
+  def test_start_secured(self):
+    self.executeScript("2.0.6/services/HDFS/package/scripts/namenode.py",
+                       classname = "NameNode",
+                       command = "start",
+                       config_file="secured.json"
+    )
+    self.assert_configure_secured()
+    self.assertResourceCalled('File', '/tmp/checkForFormat.sh',
+                              content = StaticFile('checkForFormat.sh'),
+                              mode = 0755,
+                              )
+    self.assertResourceCalled('Execute', 'sh /tmp/checkForFormat.sh hdfs /etc/hadoop/conf /var/run/hadoop/hdfs/namenode/formatted/ /hadoop/hdfs/namenode',
+                              path = ['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'],
+                              not_if = 'test -d /var/run/hadoop/hdfs/namenode/formatted/',
+                              )
+    self.assertResourceCalled('Execute', 'mkdir -p /var/run/hadoop/hdfs/namenode/formatted/',
+                              )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/dfs.exclude',
+                              owner = 'hdfs',
+                              content = Template('exclude_hosts_list.j2'),
+                              group = 'hadoop',
+                              )
+    self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
+                              owner = 'hdfs',
+                              recursive = True,
+                              )
+    self.assertResourceCalled('Directory', '/var/log/hadoop/hdfs',
+                              owner = 'hdfs',
+                              recursive = True,
+                              )
+    self.assertResourceCalled('Execute', 'kinit -kt /etc/security/keytabs/nn.service.keytab nn/c6401.ambari.apache.org@EXAMPLE.COM',
+                              )
+    self.assertResourceCalled('Execute', 'ulimit -c unlimited;  export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start namenode',
+                              not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid >/dev/null 2>&1 && ps `cat /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid` >/dev/null 2>&1',
+                              user = 'hdfs',
+                              )
+    self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/hdfs.headless.keytab hdfs',
+                              user = 'hdfs',
+                              )
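+    # In secured mode a kinit with the headless hdfs keytab must precede the
+    # safemode poll and the HdfsDirectory creation below.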
+    self.assertResourceCalled('Execute', "su - hdfs -c 'hadoop dfsadmin -safemode get' | grep 'Safe mode is OFF'",
+                              tries = 40,
+                              only_if = None,
+                              try_sleep = 10,
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/tmp',
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0777,
+                              owner = 'hdfs',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/user/ambari-qa',
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0770,
+                              owner = 'ambari-qa',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', None,
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              action = ['create'],
+                              only_if = None,
+                              )
+    self.assertNoMoreResources()
+
+  def test_stop_secured(self):
+    self.executeScript("2.0.6/services/HDFS/package/scripts/namenode.py",
+                       classname = "NameNode",
+                       command = "stop",
+                       config_file="secured.json"
+    )
+    self.assertResourceCalled('Execute', 'kinit -kt /etc/security/keytabs/nn.service.keytab nn/c6401.ambari.apache.org@EXAMPLE.COM',
+                              )
+    self.assertResourceCalled('Execute', 'ulimit -c unlimited;  export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf stop namenode',
+                              not_if = None,
+                              user = 'hdfs',
+                              )
+    self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid',
+                              action = ['delete'],
+                              ignore_failures = True,
+                              )
+    self.assertNoMoreResources()
+
+  def test_start_ha_default(self):
+    self.executeScript("2.0.6/services/HDFS/package/scripts/namenode.py",
+                       classname = "NameNode",
+                       command = "start",
+                       config_file="ha_default.json"
+    )
+    self.assert_configure_default()
+    self.assertResourceCalled('File', '/etc/hadoop/conf/dfs.exclude',
+                              owner = 'hdfs',
+                              content = Template('exclude_hosts_list.j2'),
+                              group = 'hadoop',
+                              )
+    self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
+                              owner = 'hdfs',
+                              recursive = True,
+                              )
+    self.assertResourceCalled('Directory', '/var/log/hadoop/hdfs',
+                              owner = 'hdfs',
+                              recursive = True,
+                              )
+    self.assertResourceCalled('Execute', 'true',
+                              )
+    self.assertResourceCalled('Execute', 'ulimit -c unlimited;  export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start namenode',
+                              not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid >/dev/null 2>&1 && ps `cat /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid` >/dev/null 2>&1',
+                              user = 'hdfs',
+                              )
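+    # In HA mode the safemode poll and the HdfsDirectory resources carry an
+    # only_if guard, so they run only on the NameNode that 'hdfs haadmin'
+    # reports as active (nn1 in this config).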
+    self.assertResourceCalled('Execute', "su - hdfs -c 'hadoop dfsadmin -safemode get' | grep 'Safe mode is OFF'",
+                              tries = 40,
+                              only_if = "su - hdfs -c 'hdfs haadmin -getServiceState nn1 | grep active > /dev/null'",
+                              try_sleep = 10,
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/tmp',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0777,
+                              owner = 'hdfs',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/user/ambari-qa',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0770,
+                              owner = 'ambari-qa',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', None,
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              action = ['create'],
+                              only_if = "su - hdfs -c 'hdfs haadmin -getServiceState nn1 | grep active > /dev/null'",
+                              )
+    self.assertNoMoreResources()
+
+  def test_start_ha_secured(self):
+    self.executeScript("2.0.6/services/HDFS/package/scripts/namenode.py",
+                       classname = "NameNode",
+                       command = "start",
+                       config_file="ha_secured.json"
+    )
+    self.assert_configure_secured()
+    self.assertResourceCalled('File', '/etc/hadoop/conf/dfs.exclude',
+                              owner = 'hdfs',
+                              content = Template('exclude_hosts_list.j2'),
+                              group = 'hadoop',
+                              )
+    self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
+                              owner = 'hdfs',
+                              recursive = True,
+                              )
+    self.assertResourceCalled('Directory', '/var/log/hadoop/hdfs',
+                              owner = 'hdfs',
+                              recursive = True,
+                              )
+    self.assertResourceCalled('Execute', 'kinit -kt /etc/security/keytabs/nn.service.keytab nn/c6401.ambari.apache.org@EXAMPLE.COM',
+                              )
+    self.assertResourceCalled('Execute', 'ulimit -c unlimited;  export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start namenode',
+                              not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid >/dev/null 2>&1 && ps `cat /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid` >/dev/null 2>&1',
+                              user = 'hdfs',
+                              )
+    self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/hdfs.headless.keytab hdfs',
+                              user = 'hdfs',
+                              )
+    self.assertResourceCalled('Execute', "su - hdfs -c 'hadoop dfsadmin -safemode get' | grep 'Safe mode is OFF'",
+                              tries = 40,
+                              only_if = "su - hdfs -c 'hdfs haadmin -getServiceState nn1 | grep active > /dev/null'",
+                              try_sleep = 10,
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/tmp',
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0777,
+                              owner = 'hdfs',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/user/ambari-qa',
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0770,
+                              owner = 'ambari-qa',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', None,
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              action = ['create'],
+                              only_if = "su - hdfs -c 'hdfs haadmin -getServiceState nn1 | grep active > /dev/null'",
+                              )
+    self.assertNoMoreResources()
+
+  def assert_configure_default(self):
+    self.assertResourceCalled('Directory', '/hadoop/hdfs/namenode',
+                              owner = 'hdfs',
+                              group = 'hadoop',
+                              recursive = True,
+                              mode = 0755,
+                              )
+
+  def assert_configure_secured(self):
+    self.assertResourceCalled('Directory', '/hadoop/hdfs/namenode',
+                              owner = 'hdfs',
+                              group = 'hadoop',
+                              recursive = True,
+                              mode = 0755,
+                              )
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/a5a02039/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_snamenode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_snamenode.py b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_snamenode.py
new file mode 100644
index 0000000..7693d3f
--- /dev/null
+++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_snamenode.py
@@ -0,0 +1,156 @@
+#!/usr/bin/env python
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+from mock.mock import MagicMock, patch
+from stacks.utils.RMFTestCase import *
+
+
+class TestSNamenode(RMFTestCase):
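+  '''
+  Asserts the resources the HDP 2.0.6 SecondaryNameNode script should manage
+  for configure/start/stop under default and secured configurations.
+  '''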
+
+  def test_configure_default(self):
+    self.executeScript("2.0.6/services/HDFS/package/scripts/snamenode.py",
+                       classname = "SNameNode",
+                       command = "configure",
+                       config_file="default.json"
+    )
+    self.assert_configure_default()
+    self.assertNoMoreResources()
+
+  def test_start_default(self):
+    self.executeScript("2.0.6/services/HDFS/package/scripts/snamenode.py",
+                       classname = "SNameNode",
+                       command = "start",
+                       config_file="default.json"
+    )
+    self.assert_configure_default()
+    self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
+                              owner = 'hdfs',
+                              recursive = True,
+                              )
+    self.assertResourceCalled('Directory', '/var/log/hadoop/hdfs',
+                              owner = 'hdfs',
+                              recursive = True,
+                              )
+    self.assertResourceCalled('Execute', 'true',
+                              )
+    self.assertResourceCalled('Execute', 'ulimit -c unlimited;  export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start secondarynamenode',
+                              not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-secondarynamenode.pid >/dev/null 2>&1 && ps `cat /var/run/hadoop/hdfs/hadoop-hdfs-secondarynamenode.pid` >/dev/null 2>&1',
+                              user = 'hdfs',
+                              )
+    self.assertNoMoreResources()
+
+  def test_stop_default(self):
+    self.executeScript("2.0.6/services/HDFS/package/scripts/snamenode.py",
+                       classname = "SNameNode",
+                       command = "stop",
+                       config_file="default.json"
+    )
+    self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
+                              owner = 'hdfs',
+                              recursive = True,
+                              )
+    self.assertResourceCalled('Directory', '/var/log/hadoop/hdfs',
+                              owner = 'hdfs',
+                              recursive = True,
+                              )
+    self.assertResourceCalled('Execute', 'true',
+                              )
+    self.assertResourceCalled('Execute', 'ulimit -c unlimited;  export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf stop secondarynamenode',
+                              not_if = None,
+                              user = 'hdfs',
+                              )
+    self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-secondarynamenode.pid',
+                              action = ['delete'],
+                              ignore_failures = True,
+                              )
+    self.assertNoMoreResources()
+
+  def test_configure_secured(self):
+    self.executeScript("2.0.6/services/HDFS/package/scripts/snamenode.py",
+                       classname = "SNameNode",
+                       command = "configure",
+                       config_file="secured.json"
+    )
+    self.assert_configure_secured()
+    self.assertNoMoreResources()
+
+  def test_start_secured(self):
+    self.executeScript("2.0.6/services/HDFS/package/scripts/snamenode.py",
+                       classname = "SNameNode",
+                       command = "start",
+                       config_file="secured.json"
+    )
+    self.assert_configure_secured()
+    self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
+                              owner = 'hdfs',
+                              recursive = True,
+                              )
+    self.assertResourceCalled('Directory', '/var/log/hadoop/hdfs',
+                              owner = 'hdfs',
+                              recursive = True,
+                              )
+    self.assertResourceCalled('Execute', 'kinit -kt /etc/security/keytabs/nn.service.keytab nn/c6401.ambari.apache.org@EXAMPLE.COM',
+                              )
+    self.assertResourceCalled('Execute', 'ulimit -c unlimited;  export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start secondarynamenode',
+                              not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-secondarynamenode.pid >/dev/null 2>&1 && ps `cat /var/run/hadoop/hdfs/hadoop-hdfs-secondarynamenode.pid` >/dev/null 2>&1',
+                              user = 'hdfs',
+                              )
+    self.assertNoMoreResources()
+
+  def test_stop_secured(self):
+    self.executeScript("2.0.6/services/HDFS/package/scripts/snamenode.py",
+                       classname = "SNameNode",
+                       command = "stop",
+                       config_file="secured.json"
+    )
+    self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
+                              owner = 'hdfs',
+                              recursive = True,
+                              )
+    self.assertResourceCalled('Directory', '/var/log/hadoop/hdfs',
+                              owner = 'hdfs',
+                              recursive = True,
+                              )
+    self.assertResourceCalled('Execute', 'kinit -kt /etc/security/keytabs/nn.service.keytab nn/c6401.ambari.apache.org@EXAMPLE.COM',
+                              )
+    self.assertResourceCalled('Execute', 'ulimit -c unlimited;  export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf stop secondarynamenode',
+                              not_if = None,
+                              user = 'hdfs',
+                              )
+    self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-secondarynamenode.pid',
+                              action = ['delete'],
+                              ignore_failures = True,
+                              )
+    self.assertNoMoreResources()
+
+  def assert_configure_default(self):
+    self.assertResourceCalled('Directory', '/hadoop/hdfs/namesecondary',
+                              owner = 'hdfs',
+                              group = 'hadoop',
+                              mode = 0755,
+                              recursive = True,
+                              )
+
+  def assert_configure_secured(self):
+    self.assertResourceCalled('Directory', '/hadoop/hdfs/namesecondary',
+                              owner = 'hdfs',
+                              group = 'hadoop',
+                              mode = 0755,
+                              recursive = True,
+                              )
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/a5a02039/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_zkfc.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_zkfc.py b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_zkfc.py
new file mode 100644
index 0000000..b258c5d
--- /dev/null
+++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_zkfc.py
@@ -0,0 +1,121 @@
+#!/usr/bin/env python
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+from mock.mock import MagicMock, patch
+from stacks.utils.RMFTestCase import *
+
+
+class TestZkfc(RMFTestCase):
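+  '''
+  Asserts the resources the HDP 2.0.6 ZKFC script should manage for
+  start/stop under HA default and HA secured configurations.
+  '''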
+
+  def test_start_default(self):
+    self.executeScript("2.0.6/services/HDFS/package/scripts/zkfc_slave.py",
+                       classname = "ZkfcSlave",
+                       command = "start",
+                       config_file="ha_default.json"
+    )
+    self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
+                              owner = 'hdfs',
+                              recursive = True,
+                              )
+    self.assertResourceCalled('Directory', '/var/log/hadoop/hdfs',
+                              owner = 'hdfs',
+                              recursive = True,
+                              )
+    self.assertResourceCalled('Execute', 'true',
+                              )
+    self.assertResourceCalled('Execute', 'ulimit -c unlimited;  export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start zkfc',
+                              not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid >/dev/null 2>&1 && ps `cat /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid` >/dev/null 2>&1',
+                              user = 'hdfs',
+                              )
+    self.assertNoMoreResources()
+
+  def test_stop_default(self):
+    self.executeScript("2.0.6/services/HDFS/package/scripts/zkfc_slave.py",
+                       classname = "ZkfcSlave",
+                       command = "stop",
+                       config_file="ha_default.json"
+    )
+    self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
+                              owner = 'hdfs',
+                              recursive = True,
+                              )
+    self.assertResourceCalled('Directory', '/var/log/hadoop/hdfs',
+                              owner = 'hdfs',
+                              recursive = True,
+                              )
+    self.assertResourceCalled('Execute', 'true',
+                              )
+    self.assertResourceCalled('Execute', 'ulimit -c unlimited;  export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf stop zkfc',
+                              not_if = None,
+                              user = 'hdfs',
+                              )
+    self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid',
+                              action = ['delete'],
+                              ignore_failures = True,
+                              )
+    self.assertNoMoreResources()
+
+  def test_start_secured(self):
+    self.executeScript("2.0.6/services/HDFS/package/scripts/zkfc_slave.py",
+                       classname = "ZkfcSlave",
+                       command = "start",
+                       config_file="ha_secured.json"
+    )
+    self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
+                              owner = 'hdfs',
+                              recursive = True,
+                              )
+    self.assertResourceCalled('Directory', '/var/log/hadoop/hdfs',
+                              owner = 'hdfs',
+                              recursive = True,
+                              )
+    self.assertResourceCalled('Execute', 'true',
+                              )
+    self.assertResourceCalled('Execute', 'ulimit -c unlimited;  export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start zkfc',
+                              not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid >/dev/null 2>&1 && ps `cat /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid` >/dev/null 2>&1',
+                              user = 'hdfs',
+                              )
+    self.assertNoMoreResources()
+
+  def test_stop_secured(self):
+    self.executeScript("2.0.6/services/HDFS/package/scripts/zkfc_slave.py",
+                       classname = "ZkfcSlave",
+                       command = "stop",
+                       config_file="ha_secured.json"
+    )
+    self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
+                              owner = 'hdfs',
+                              recursive = True,
+                              )
+    self.assertResourceCalled('Directory', '/var/log/hadoop/hdfs',
+                              owner = 'hdfs',
+                              recursive = True,
+                              )
+    self.assertResourceCalled('Execute', 'true',
+                              )
+    self.assertResourceCalled('Execute', 'ulimit -c unlimited;  export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf stop zkfc',
+                              not_if = None,
+                              user = 'hdfs',
+                              )
+    self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid',
+                              action = ['delete'],
+                              ignore_failures = True,
+                              )
+    self.assertNoMoreResources()