Posted to commits@ambari.apache.org by ao...@apache.org on 2014/12/29 14:42:43 UTC

[1/3] ambari git commit: AMBARI-8932. Creating hdfs directories on deploy takes too long, Part 2, reduces deploy time by ~6min (aonishuk)

Repository: ambari
Updated Branches:
  refs/heads/trunk b7dc5dce9 -> df9e096f1
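
The change under test throughout this commit: the old per-path HdfsDirectory and
CopyFromLocal resources (each one its own hadoop fs invocation) are replaced by
HdfsResource calls whose 'create_delayed'/'delete_delayed' actions merely queue a
request, followed by a single HdfsResource(None, action=['execute']) that flushes
the whole queue in one batched pass. A minimal, self-contained sketch of that idea
(the class and names below are illustrative stand-ins, not Ambari's actual
implementation):

    class HdfsResourceSketch(object):
        # Class-level queue shared by every *_delayed call.
        pending = []

        def __init__(self, path, action, **kwargs):
            if action in (['create_delayed'], ['delete_delayed']):
                # Queue only; no filesystem call happens yet.
                HdfsResourceSketch.pending.append((path, action[0], kwargs))
            elif action == ['execute']:
                # One batched pass over everything queued so far. The real
                # resource talks to HDFS once here instead of once per path,
                # which is where the ~6min deploy-time saving comes from.
                for queued_path, act, kw in HdfsResourceSketch.pending:
                    print('%s %s %r' % (act, queued_path, kw))
                HdfsResourceSketch.pending = []

    # Usage mirroring the assertions below:
    HdfsResourceSketch('/apps/hive/warehouse', action=['create_delayed'],
                       type='directory', owner='hive', mode=0o777)
    HdfsResourceSketch('/user/hive', action=['create_delayed'],
                       type='directory', owner='hive', mode=0o700)
    HdfsResourceSketch(None, action=['execute'])  # single batched flush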


http://git-wip-us.apache.org/repos/asf/ambari/blob/df9e096f/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
index 68d650c..5797bf0 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
@@ -21,6 +21,7 @@ import socket
 import subprocess
 
 from mock.mock import MagicMock, patch
+from resource_management.libraries.functions import version
 from resource_management.core import shell
 from stacks.utils.RMFTestCase import *
 
@@ -170,89 +171,154 @@ class TestHiveServer(RMFTestCase):
     self.assertFalse(socket_mock.called)
 
   def assert_configure_default(self):
-    self.assertResourceCalled('HdfsDirectory', '/apps/tez/',
-                              action = ['create_delayed'],
-                              mode = 0755,
-                              owner = 'tez',
-                              security_enabled = False,
-                              keytab = UnknownConfigurationMock(),
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              bin_dir = '/usr/bin',
-                              kinit_path_local = "/usr/bin/kinit"
-    )
 
-    self.assertResourceCalled('HdfsDirectory', '/apps/tez/lib/',
-                              action = ['create_delayed'],
-                              mode = 0755,
-                              owner = 'tez',
-                              security_enabled = False,
-                              keytab = UnknownConfigurationMock(),
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              bin_dir = '/usr/bin',
-                              kinit_path_local = "/usr/bin/kinit"
-    )
-    self.assertResourceCalled('HdfsDirectory', None,
-                              security_enabled = False,
-                              keytab = UnknownConfigurationMock(),
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = '/usr/bin/kinit',
-                              bin_dir = '/usr/bin',
-                              action = ['create']
+    self.assertResourceCalled('HdfsResource', '/apps/tez/',
+        security_enabled = False,
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        keytab = UnknownConfigurationMock(),
+        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        owner = 'tez',
+        hadoop_bin_dir = '/usr/bin',
+        type = 'directory',
+        action = ['create_delayed'],
+        mode = 0755,
     )
-
-    self.assertResourceCalled('CopyFromLocal', '/usr/lib/tez/tez*.jar',
-                              mode=0755,
-                              owner='tez',
-                              dest_dir='/apps/tez/',
-                              kinnit_if_needed='',
-                              hadoop_conf_dir='/etc/hadoop/conf',
-                              hadoop_bin_dir='/usr/bin',
-                              hdfs_user='hdfs',
-                              dest_file=None
+    self.assertResourceCalled('HdfsResource', '/apps/tez/lib/',
+        security_enabled = False,
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        keytab = UnknownConfigurationMock(),
+        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        owner = 'tez',
+        hadoop_bin_dir = '/usr/bin',
+        type = 'directory',
+        action = ['create_delayed'],
+        mode = 0755,
     )
-
-    self.assertResourceCalled('CopyFromLocal', '/usr/lib/tez/lib/*.jar',
-                              mode=0755,
-                              owner='tez',
-                              dest_dir='/apps/tez/lib/',
-                              kinnit_if_needed='',
-                              hadoop_bin_dir='/usr/bin',
-                              hadoop_conf_dir='/etc/hadoop/conf',
-                              hdfs_user='hdfs'
-    )
-    self.assertResourceCalled('HdfsDirectory', '/apps/hive/warehouse',
+    self.assertResourceCalled('HdfsResource', None,
         security_enabled = False,
+        hadoop_bin_dir = '/usr/bin',
         keytab = UnknownConfigurationMock(),
-        conf_dir = '/etc/hadoop/conf',
-        hdfs_user = 'hdfs',
+        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
         kinit_path_local = '/usr/bin/kinit',
-        mode = 0777,
+        user = 'hdfs',
+        action = ['execute'],
+        hadoop_conf_dir = '/etc/hadoop/conf',
+    )
+    self.assertResourceCalled('HdfsResource', '/apps/tez/',
+        security_enabled = False,
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        keytab = UnknownConfigurationMock(),
+        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        owner = 'tez',
+        hadoop_bin_dir = '/usr/bin',
+        type = 'directory',
+        action = ['create_delayed'],
+        mode = 0755,
+    )
+    self.assertResourceCalled('HdfsResource', '/apps/tez/lib/',
+        security_enabled = False,
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        keytab = UnknownConfigurationMock(),
+        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        owner = 'tez',
+        hadoop_bin_dir = '/usr/bin',
+        type = 'directory',
+        action = ['create_delayed'],
+        mode = 0755,
+    )
+    self.assertResourceCalled('HdfsResource', None,
+        security_enabled = False,
+        hadoop_bin_dir = '/usr/bin',
+        keytab = UnknownConfigurationMock(),
+        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        action = ['execute'],
+        hadoop_conf_dir = '/etc/hadoop/conf',
+    )
+    self.assertResourceCalled('HdfsResource', '/apps/webhcat',
+        security_enabled = False,
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        keytab = UnknownConfigurationMock(),
+        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        owner = 'hcat',
+        hadoop_bin_dir = '/usr/bin',
+        type = 'directory',
+        action = ['create_delayed'],
+        mode = 0755,
+    )
+    self.assertResourceCalled('HdfsResource', '/user/hcat',
+        security_enabled = False,
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        keytab = UnknownConfigurationMock(),
+        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        owner = 'hcat',
+        hadoop_bin_dir = '/usr/bin',
+        type = 'directory',
+        action = ['create_delayed'],
+        mode = 0755,
+    )
+    self.assertResourceCalled('HdfsResource', '/apps/webhcat/hive.tar.gz',
+        security_enabled = False,
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        keytab = UnknownConfigurationMock(),
+        source = '/usr/share/HDP-webhcat/hive.tar.gz',
+        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        action = ['create_delayed'],
+        group = 'hadoop',
+        hadoop_bin_dir = '/usr/bin',
+        type = 'file',
+        mode = 0755,
+    )
+    self.assertResourceCalled('HdfsResource', '/apps/hive/warehouse',
+        security_enabled = False,
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        keytab = UnknownConfigurationMock(),
+        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
         owner = 'hive',
-        bin_dir = '/usr/bin',
+        hadoop_bin_dir = '/usr/bin',
+        type = 'directory',
         action = ['create_delayed'],
+        mode = 0777,
     )
-    self.assertResourceCalled('HdfsDirectory', '/user/hive',
+    self.assertResourceCalled('HdfsResource', '/user/hive',
         security_enabled = False,
+        hadoop_conf_dir = '/etc/hadoop/conf',
         keytab = UnknownConfigurationMock(),
-        conf_dir = '/etc/hadoop/conf',
-        hdfs_user = 'hdfs',
+        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
         kinit_path_local = '/usr/bin/kinit',
-        mode = 0700,
+        user = 'hdfs',
         owner = 'hive',
-        bin_dir = '/usr/bin',
+        hadoop_bin_dir = '/usr/bin',
+        type = 'directory',
         action = ['create_delayed'],
+        mode = 0700,
     )
-    self.assertResourceCalled('HdfsDirectory', None,
+    self.assertResourceCalled('HdfsResource', None,
         security_enabled = False,
+        hadoop_bin_dir = '/usr/bin',
         keytab = UnknownConfigurationMock(),
-        conf_dir = '/etc/hadoop/conf',
-        hdfs_user = 'hdfs',
+        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
         kinit_path_local = '/usr/bin/kinit',
-        bin_dir = '/usr/bin',
-        action = ['create'],
+        user = 'hdfs',
+        action = ['execute'],
+        hadoop_conf_dir = '/etc/hadoop/conf',
     )
     self.assertResourceCalled('Directory', '/etc/hive',
         mode = 0755
@@ -370,36 +436,81 @@ class TestHiveServer(RMFTestCase):
     )
 
   def assert_configure_secured(self):
-    self.assertResourceCalled('HdfsDirectory', '/apps/hive/warehouse',
+    self.assertResourceCalled('HdfsResource', '/apps/webhcat',
         security_enabled = True,
+        hadoop_conf_dir = '/etc/hadoop/conf',
         keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-        conf_dir = '/etc/hadoop/conf',
-        hdfs_user = 'hdfs',
+        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
         kinit_path_local = '/usr/bin/kinit',
-        bin_dir = '/usr/bin',
-        mode = 0777,
+        user = 'hdfs',
+        owner = 'hcat',
+        hadoop_bin_dir = '/usr/bin',
+        type = 'directory',
+        action = ['create_delayed'],
+        mode = 0755,
+    )
+    self.assertResourceCalled('HdfsResource', '/user/hcat',
+        security_enabled = True,
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        owner = 'hcat',
+        hadoop_bin_dir = '/usr/bin',
+        type = 'directory',
+        action = ['create_delayed'],
+        mode = 0755,
+    )
+    self.assertResourceCalled('HdfsResource', '/apps/webhcat/hive.tar.gz',
+        security_enabled = True,
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+        source = '/usr/share/HDP-webhcat/hive.tar.gz',
+        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        action = ['create_delayed'],
+        group = 'hadoop',
+        hadoop_bin_dir = '/usr/bin',
+        type = 'file',
+        mode = 0755,
+    )
+    self.assertResourceCalled('HdfsResource', '/apps/hive/warehouse',
+        security_enabled = True,
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
         owner = 'hive',
+        hadoop_bin_dir = '/usr/bin',
+        type = 'directory',
         action = ['create_delayed'],
+        mode = 0777,
     )
-    self.assertResourceCalled('HdfsDirectory', '/user/hive',
+    self.assertResourceCalled('HdfsResource', '/user/hive',
         security_enabled = True,
+        hadoop_conf_dir = '/etc/hadoop/conf',
         keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-        conf_dir = '/etc/hadoop/conf',
-        hdfs_user = 'hdfs',
+        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
         kinit_path_local = '/usr/bin/kinit',
-        mode = 0700,
-        bin_dir = '/usr/bin',
+        user = 'hdfs',
         owner = 'hive',
+        hadoop_bin_dir = '/usr/bin',
+        type = 'directory',
         action = ['create_delayed'],
+        mode = 0700,
     )
-    self.assertResourceCalled('HdfsDirectory', None,
+    self.assertResourceCalled('HdfsResource', None,
         security_enabled = True,
+        hadoop_bin_dir = '/usr/bin',
         keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-        conf_dir = '/etc/hadoop/conf',
-        hdfs_user = 'hdfs',
-        bin_dir = '/usr/bin',
+        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
         kinit_path_local = '/usr/bin/kinit',
-        action = ['create'],
+        user = 'hdfs',
+        action = ['execute'],
+        hadoop_conf_dir = '/etc/hadoop/conf',
     )
     self.assertResourceCalled('Directory', '/etc/hive',
         mode = 0755
@@ -545,6 +656,7 @@ class TestHiveServer(RMFTestCase):
   @patch("hive_server.HiveServer.pre_rolling_restart")
   @patch("hive_server.HiveServer.start")
   @patch.object(shell, "call", new=MagicMock(return_value=(0,"hive-server2 - 2.2.0.0-2041")))
+  @patch.object(version, "get_hdp_build_version", new=MagicMock(return_value="2.2.0.0-2041"))
   def test_stop_during_upgrade(self, hive_server_start_mock,
     hive_server_pre_rolling_mock):
     

http://git-wip-us.apache.org/repos/asf/ambari/blob/df9e096f/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_webhcat_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_webhcat_server.py b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_webhcat_server.py
index 7ebd4b9..f341c9b 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_webhcat_server.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_webhcat_server.py
@@ -116,37 +116,6 @@ class TestWebHCatServer(RMFTestCase):
     self.assertNoMoreResources()
 
   def assert_configure_default(self):
-    self.assertResourceCalled('HdfsDirectory', '/apps/webhcat',
-                              security_enabled = False,
-                              keytab = UnknownConfigurationMock(),
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = "/usr/bin/kinit",
-                              mode = 0755,
-                              owner = 'hcat',
-                              bin_dir = '/usr/bin',
-                              action = ['create_delayed'],
-                              )
-    self.assertResourceCalled('HdfsDirectory', '/user/hcat',
-                              security_enabled = False,
-                              keytab = UnknownConfigurationMock(),
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = "/usr/bin/kinit",
-                              mode = 0755,
-                              owner = 'hcat',
-                              bin_dir = '/usr/bin',
-                              action = ['create_delayed'],
-                              )
-    self.assertResourceCalled('HdfsDirectory', None,
-                              security_enabled = False,
-                              keytab = UnknownConfigurationMock(),
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = "/usr/bin/kinit",
-                              bin_dir = '/usr/bin',
-                              action = ['create'],
-                              )
     self.assertResourceCalled('Directory', '/var/run/webhcat',
                               owner = 'hcat',
                               group = 'hadoop',
@@ -164,42 +133,6 @@ class TestWebHCatServer(RMFTestCase):
                               group = 'hadoop',
                               recursive = True,
                               )
-    self.assertResourceCalled('CopyFromLocal', '/usr/lib/hadoop-mapreduce/hadoop-streaming-*.jar',
-                              owner='hcat',
-                              mode=0755,
-                              dest_dir='/apps/webhcat',
-                              kinnit_if_needed='',
-                              hadoop_conf_dir='/etc/hadoop/conf',
-                              hadoop_bin_dir='/usr/bin',
-                              hdfs_user='hdfs'
-    )
-    self.assertResourceCalled('CopyFromLocal', '/usr/share/HDP-webhcat/pig.tar.gz',
-                              owner='hcat',
-                              mode=0755,
-                              dest_dir='/apps/webhcat',
-                              kinnit_if_needed='',
-                              hadoop_conf_dir='/etc/hadoop/conf',
-                              hadoop_bin_dir='/usr/bin',
-                              hdfs_user='hdfs'
-    )
-    self.assertResourceCalled('CopyFromLocal', '/usr/share/HDP-webhcat/hive.tar.gz',
-                              owner='hcat',
-                              mode=0755,
-                              dest_dir='/apps/webhcat',
-                              kinnit_if_needed='',
-                              hadoop_bin_dir='/usr/bin',
-                              hadoop_conf_dir='/etc/hadoop/conf',
-                              hdfs_user='hdfs'
-    )
-    self.assertResourceCalled('CopyFromLocal', '/usr/share/HDP-webhcat/sqoop*.tar.gz',
-                              owner='hcat',
-                              mode=0755,
-                              dest_dir='/apps/webhcat',
-                              kinnit_if_needed='',
-                              hadoop_bin_dir='/usr/bin',
-                              hadoop_conf_dir='/etc/hadoop/conf',
-                              hdfs_user='hdfs'
-    )
     self.assertResourceCalled('XmlConfig', 'webhcat-site.xml',
                               owner = 'hcat',
                               group = 'hadoop',
@@ -214,37 +147,6 @@ class TestWebHCatServer(RMFTestCase):
                               )
 
   def assert_configure_secured(self):
-    self.assertResourceCalled('HdfsDirectory', '/apps/webhcat',
-                              security_enabled = True,
-                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = '/usr/bin/kinit',
-                              mode = 0755,
-                              owner = 'hcat',
-                              bin_dir = '/usr/bin',
-                              action = ['create_delayed'],
-                              )
-    self.assertResourceCalled('HdfsDirectory', '/user/hcat',
-                              security_enabled = True,
-                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = '/usr/bin/kinit',
-                              mode = 0755,
-                              owner = 'hcat',
-                              bin_dir = '/usr/bin',
-                              action = ['create_delayed'],
-                              )
-    self.assertResourceCalled('HdfsDirectory', None,
-                              security_enabled = True,
-                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = '/usr/bin/kinit',
-                              bin_dir = '/usr/bin',
-                              action = ['create'],
-                              )
     self.assertResourceCalled('Directory', '/var/run/webhcat',
                               owner = 'hcat',
                               group = 'hadoop',
@@ -266,42 +168,6 @@ class TestWebHCatServer(RMFTestCase):
                               path = ['/bin'],
                               user = 'hcat',
                               )
-    self.assertResourceCalled('CopyFromLocal', '/usr/lib/hadoop-mapreduce/hadoop-streaming-*.jar',
-                              owner='hcat',
-                              mode=0755,
-                              dest_dir='/apps/webhcat',
-                              kinnit_if_needed='/usr/bin/kinit -kt /etc/security/keytabs/hdfs.headless.keytab hdfs;',
-                              hadoop_conf_dir='/etc/hadoop/conf',
-                              hadoop_bin_dir='/usr/bin',
-                              hdfs_user='hdfs'
-    )
-    self.assertResourceCalled('CopyFromLocal', '/usr/share/HDP-webhcat/pig.tar.gz',
-                              owner='hcat',
-                              mode=0755,
-                              dest_dir='/apps/webhcat',
-                              kinnit_if_needed='/usr/bin/kinit -kt /etc/security/keytabs/hdfs.headless.keytab hdfs;',
-                              hadoop_conf_dir='/etc/hadoop/conf',
-                              hadoop_bin_dir='/usr/bin',
-                              hdfs_user='hdfs'
-    )
-    self.assertResourceCalled('CopyFromLocal', '/usr/share/HDP-webhcat/hive.tar.gz',
-                              owner='hcat',
-                              mode=0755,
-                              dest_dir='/apps/webhcat',
-                              kinnit_if_needed='/usr/bin/kinit -kt /etc/security/keytabs/hdfs.headless.keytab hdfs;',
-                              hadoop_conf_dir='/etc/hadoop/conf',
-                              hadoop_bin_dir='/usr/bin',
-                              hdfs_user='hdfs'
-    )
-    self.assertResourceCalled('CopyFromLocal', '/usr/share/HDP-webhcat/sqoop*.tar.gz',
-                              owner='hcat',
-                              mode=0755,
-                              dest_dir='/apps/webhcat',
-                              kinnit_if_needed='/usr/bin/kinit -kt /etc/security/keytabs/hdfs.headless.keytab hdfs;',
-                              hadoop_conf_dir='/etc/hadoop/conf',
-                              hadoop_bin_dir='/usr/bin',
-                              hdfs_user='hdfs'
-    )
     self.assertResourceCalled('XmlConfig', 'webhcat-site.xml',
                               owner = 'hcat',
                               group = 'hadoop',
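
Note that the CopyFromLocal assertions deleted above are not dropped outright: in
the new model a local-to-HDFS upload is expressed as a queued HdfsResource of type
'file' with a source path (see the '/apps/webhcat/hive.tar.gz' expectations in
test_hive_server.py above). A hedged sketch of that call shape, reusing the
illustrative HdfsResourceSketch class from the first snippet:

    # Upload queued alongside the directory requests and flushed by the
    # same single 'execute' (illustrative names, as above):
    HdfsResourceSketch('/apps/webhcat/hive.tar.gz', action=['create_delayed'],
                       type='file', source='/usr/share/HDP-webhcat/hive.tar.gz',
                       group='hadoop', mode=0o755)
    HdfsResourceSketch(None, action=['execute'])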

http://git-wip-us.apache.org/repos/asf/ambari/blob/df9e096f/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_server.py b/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_server.py
index 1fa2677..5aa21f6 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_server.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_server.py
@@ -139,16 +139,28 @@ class TestOozieServer(RMFTestCase):
 
 
   def assert_configure_default(self):
-    self.assertResourceCalled('HdfsDirectory', '/user/oozie',
-                              security_enabled = False,
-                              keytab = UnknownConfigurationMock(),
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = '/usr/bin/kinit',
-                              mode = 0775,
-                              owner = 'oozie',
-                              bin_dir = '/usr/bin',
-                              action = ['create'],
+    self.assertResourceCalled('HdfsResource', '/user/oozie',
+        security_enabled = False,
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        keytab = UnknownConfigurationMock(),
+        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        owner = 'oozie',
+        hadoop_bin_dir = '/usr/bin',
+        type = 'directory',
+        action = ['create_delayed'],
+        mode = 0775,
+    )
+    self.assertResourceCalled('HdfsResource', None,
+        security_enabled = False,
+        hadoop_bin_dir = '/usr/bin',
+        keytab = UnknownConfigurationMock(),
+        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        action = ['execute'],
+        hadoop_conf_dir = '/etc/hadoop/conf',
     )
     self.assertResourceCalled('Directory', '/etc/oozie/conf',
                               owner = 'oozie',
@@ -292,17 +304,29 @@ class TestOozieServer(RMFTestCase):
 
 
   def assert_configure_secured(self):
-    self.assertResourceCalled('HdfsDirectory', '/user/oozie',
-                              security_enabled = True,
-                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = '/usr/bin/kinit',
-                              mode = 0775,
-                              owner = 'oozie',
-                              bin_dir = '/usr/bin',
-                              action = ['create'],
-                              )
+    self.assertResourceCalled('HdfsResource', '/user/oozie',
+        security_enabled = True,
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        owner = 'oozie',
+        hadoop_bin_dir = '/usr/bin',
+        type = 'directory',
+        action = ['create_delayed'],
+        mode = 0775,
+    )
+    self.assertResourceCalled('HdfsResource', None,
+        security_enabled = True,
+        hadoop_bin_dir = '/usr/bin',
+        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        action = ['execute'],
+        hadoop_conf_dir = '/etc/hadoop/conf',
+    )
     self.assertResourceCalled('Directory', '/etc/oozie/conf',
                               owner = 'oozie',
                               group = 'hadoop',

http://git-wip-us.apache.org/repos/asf/ambari/blob/df9e096f/ambari-server/src/test/python/stacks/2.0.6/PIG/test_pig_service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/PIG/test_pig_service_check.py b/ambari-server/src/test/python/stacks/2.0.6/PIG/test_pig_service_check.py
index fbd6efc..6a0b6f2 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/PIG/test_pig_service_check.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/PIG/test_pig_service_check.py
@@ -4,7 +4,7 @@
 Licensed to the Apache Software Foundation (ASF) under one
 or more contributor license agreements.  See the NOTICE file
 distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
+regarding copyright ownership.  The ASF licenses this file`
 to you under the Apache License, Version 2.0 (the
 "License"); you may not use this file except in compliance
 with the License.  You may obtain a copy of the License at
@@ -32,15 +32,50 @@ class TestPigServiceCheck(RMFTestCase):
                        hdp_stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES
     )
-    self.assertResourceCalled('ExecuteHadoop', 'dfs -rmr pigsmoke.out passwd; hadoop --config /etc/hadoop/conf dfs -put /etc/passwd passwd ',
-      try_sleep = 5,
-      tries = 3,
-      user = 'ambari-qa',
-      conf_dir = '/etc/hadoop/conf',
-      security_enabled = False,
-      keytab = UnknownConfigurationMock(),
-      bin_dir = '/usr/bin',
-      kinit_path_local = '/usr/bin/kinit'
+
+    self.assertResourceCalled('HdfsResource', '/user/ambari-qa/pigsmoke.out',
+        security_enabled = False,
+        hadoop_bin_dir = '/usr/bin',
+        keytab = UnknownConfigurationMock(),
+        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'ambari-qa',
+        action = ['delete_delayed'],
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        type = 'directory',
+    )
+    self.assertResourceCalled('HdfsResource', '/user/ambari-qa/passwd',
+        security_enabled = False,
+        hadoop_bin_dir = '/usr/bin',
+        keytab = UnknownConfigurationMock(),
+        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'ambari-qa',
+        action = ['delete_delayed'],
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        type = 'file',
+    )
+    self.assertResourceCalled('HdfsResource', '/user/ambari-qa/passwd',
+        security_enabled = False,
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        keytab = UnknownConfigurationMock(),
+        source = '/etc/passwd',
+        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'ambari-qa',
+        action = ['create_delayed'],
+        hadoop_bin_dir = '/usr/bin',
+        type = 'file',
+    )
+    self.assertResourceCalled('HdfsResource', None,
+        security_enabled = False,
+        hadoop_bin_dir = '/usr/bin',
+        keytab = UnknownConfigurationMock(),
+        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        action = ['execute'],
+        hadoop_conf_dir = '/etc/hadoop/conf',
     )
        
     self.assertResourceCalled('File', '/tmp/pigSmoke.sh',
@@ -55,7 +90,7 @@ class TestPigServiceCheck(RMFTestCase):
       try_sleep = 5,
     )
        
-    self.assertResourceCalled('ExecuteHadoop', 'fs -test -e pigsmoke.out',
+    self.assertResourceCalled('ExecuteHadoop', 'fs -test -e /user/ambari-qa/pigsmoke.out',
       user = 'ambari-qa',
       bin_dir = '/usr/bin',
       conf_dir = '/etc/hadoop/conf',
@@ -70,16 +105,49 @@ class TestPigServiceCheck(RMFTestCase):
                        hdp_stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES
     )
-    
-    self.assertResourceCalled('ExecuteHadoop', 'dfs -rmr pigsmoke.out passwd; hadoop --config /etc/hadoop/conf dfs -put /etc/passwd passwd ',
-      try_sleep = 5,
-      tries = 3,
-      user = 'ambari-qa',
-      conf_dir = '/etc/hadoop/conf',
-      security_enabled = True, 
-      keytab = '/etc/security/keytabs/smokeuser.headless.keytab',
-      bin_dir = '/usr/bin',
-      kinit_path_local = '/usr/bin/kinit'
+    self.assertResourceCalled('HdfsResource', '/user/ambari-qa/pigsmoke.out',
+        security_enabled = True,
+        hadoop_bin_dir = '/usr/bin',
+        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'ambari-qa',
+        action = ['delete_delayed'],
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        type = 'directory',
+    )
+    self.assertResourceCalled('HdfsResource', '/user/ambari-qa/passwd',
+        security_enabled = True,
+        hadoop_bin_dir = '/usr/bin',
+        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'ambari-qa',
+        action = ['delete_delayed'],
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        type = 'file',
+    )
+    self.assertResourceCalled('HdfsResource', '/user/ambari-qa/passwd',
+        security_enabled = True,
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+        source = '/etc/passwd',
+        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'ambari-qa',
+        action = ['create_delayed'],
+        hadoop_bin_dir = '/usr/bin',
+        type = 'file',
+    )
+    self.assertResourceCalled('HdfsResource', None,
+        security_enabled = True,
+        hadoop_bin_dir = '/usr/bin',
+        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        action = ['execute'],
+        hadoop_conf_dir = '/etc/hadoop/conf',
     )
        
     self.assertResourceCalled('File', '/tmp/pigSmoke.sh',
@@ -94,7 +162,7 @@ class TestPigServiceCheck(RMFTestCase):
       try_sleep = 5,
     )
        
-    self.assertResourceCalled('ExecuteHadoop', 'fs -test -e pigsmoke.out',
+    self.assertResourceCalled('ExecuteHadoop', 'fs -test -e /user/ambari-qa/pigsmoke.out',
       user = 'ambari-qa',
       bin_dir = '/usr/bin',
       conf_dir = '/etc/hadoop/conf',
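
The same batching replaces the composite shell pipeline removed above
('dfs -rmr pigsmoke.out passwd; ... dfs -put /etc/passwd passwd'): the two
deletes and the upload become three queued requests plus one flush, now
addressing absolute paths under /user/ambari-qa. Sketched with the same
illustrative class:

    HdfsResourceSketch('/user/ambari-qa/pigsmoke.out',
                       action=['delete_delayed'], type='directory')
    HdfsResourceSketch('/user/ambari-qa/passwd',
                       action=['delete_delayed'], type='file')
    HdfsResourceSketch('/user/ambari-qa/passwd', action=['create_delayed'],
                       type='file', source='/etc/passwd')
    HdfsResourceSketch(None, action=['execute'])  # one batched run replaces the pipeline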

http://git-wip-us.apache.org/repos/asf/ambari/blob/df9e096f/ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py
index b759de6..a19150f 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py
@@ -113,72 +113,84 @@ class TestHistoryServer(RMFTestCase):
     self.assertNoMoreResources()
 
   def assert_configure_default(self):
-    self.assertResourceCalled('HdfsDirectory', '/app-logs',
-                              security_enabled = False,
-                              keytab = UnknownConfigurationMock(),
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = "/usr/bin/kinit",
-                              recursive_chmod = True,
-                              owner = 'yarn',
-                              group = 'hadoop',
-                              action = ['create_delayed'],
-                              mode = 0777,
-                              bin_dir = '/usr/bin'
-                              )
-    self.assertResourceCalled('HdfsDirectory', '/mapred',
-                              security_enabled = False,
-                              keytab = UnknownConfigurationMock(),
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = "/usr/bin/kinit",
-                              owner = 'mapred',
-                              bin_dir = '/usr/bin',
-                              action = ['create_delayed'],
-                              )
-    self.assertResourceCalled('HdfsDirectory', '/mapred/system',
-                              security_enabled = False,
-                              keytab = UnknownConfigurationMock(),
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = "/usr/bin/kinit",
-                              owner = 'hdfs',
-                              bin_dir = '/usr/bin',
-                              action = ['create_delayed'],
-                              )
-    self.assertResourceCalled('HdfsDirectory', '/mr-history/tmp',
-                              security_enabled = False,
-                              keytab = UnknownConfigurationMock(),
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = "/usr/bin/kinit",
-                              mode = 0777,
-                              owner = 'mapred',
-                              group = 'hadoop',
-                              bin_dir = '/usr/bin',
-                              action = ['create_delayed'],
-                              )
-    self.assertResourceCalled('HdfsDirectory', '/mr-history/done',
-                              security_enabled = False,
-                              keytab = UnknownConfigurationMock(),
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = "/usr/bin/kinit",
-                              mode = 01777,
-                              owner = 'mapred',
-                              group = 'hadoop',
-                              bin_dir = '/usr/bin',
-                              action = ['create_delayed'],
-                              )
-    self.assertResourceCalled('HdfsDirectory', None,
-                              security_enabled = False,
-                              keytab = UnknownConfigurationMock(),
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = "/usr/bin/kinit",
-                              bin_dir = '/usr/bin',
-                              action = ['create'],
-                              )
+
+    self.assertResourceCalled('HdfsResource', '/app-logs',
+        security_enabled = False,
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        keytab = UnknownConfigurationMock(),
+        user = 'hdfs',
+        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
+        kinit_path_local = '/usr/bin/kinit',
+        recursive_chmod = True,
+        owner = 'yarn',
+        group = 'hadoop',
+        hadoop_bin_dir = '/usr/bin',
+        type = 'directory',
+        action = ['create_delayed'],
+        mode = 0777,
+    )
+    self.assertResourceCalled('HdfsResource', '/mapred',
+        security_enabled = False,
+        hadoop_bin_dir = '/usr/bin',
+        keytab = UnknownConfigurationMock(),
+        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        owner = 'mapred',
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        type = 'directory',
+        action = ['create_delayed'],
+    )
+    self.assertResourceCalled('HdfsResource', '/mapred/system',
+        security_enabled = False,
+        hadoop_bin_dir = '/usr/bin',
+        keytab = UnknownConfigurationMock(),
+        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        owner = 'hdfs',
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        type = 'directory',
+        action = ['create_delayed'],
+    )
+    self.assertResourceCalled('HdfsResource', '/mr-history/tmp',
+        security_enabled = False,
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        keytab = UnknownConfigurationMock(),
+        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        owner = 'mapred',
+        group = 'hadoop',
+        hadoop_bin_dir = '/usr/bin',
+        type = 'directory',
+        action = ['create_delayed'],
+        mode = 0777,
+    )
+    self.assertResourceCalled('HdfsResource', '/mr-history/done',
+        security_enabled = False,
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        keytab = UnknownConfigurationMock(),
+        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        owner = 'mapred',
+        group = 'hadoop',
+        hadoop_bin_dir = '/usr/bin',
+        type = 'directory',
+        action = ['create_delayed'],
+        mode = 0777,
+    )
+    self.assertResourceCalled('HdfsResource', None,
+        security_enabled = False,
+        hadoop_bin_dir = '/usr/bin',
+        keytab = UnknownConfigurationMock(),
+        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        action = ['execute'],
+        hadoop_conf_dir = '/etc/hadoop/conf',
+    )
     self.assertResourceCalled('Directory', '/var/run/hadoop-yarn',
       owner = 'yarn',
       group = 'hadoop',
@@ -305,72 +317,84 @@ class TestHistoryServer(RMFTestCase):
                               )
 
   def assert_configure_secured(self):
-    self.assertResourceCalled('HdfsDirectory', '/app-logs',
-                              security_enabled = True,
-                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = '/usr/bin/kinit',
-                              recursive_chmod = True,
-                              owner = 'yarn',
-                              group = 'hadoop',
-                              action = ['create_delayed'],
-                              bin_dir = '/usr/bin',
-                              mode = 0777,
-                              )
-    self.assertResourceCalled('HdfsDirectory', '/mapred',
-                              security_enabled = True,
-                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = '/usr/bin/kinit',
-                              owner = 'mapred',
-                              bin_dir = '/usr/bin',
-                              action = ['create_delayed'],
-                              )
-    self.assertResourceCalled('HdfsDirectory', '/mapred/system',
-                              security_enabled = True,
-                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = '/usr/bin/kinit',
-                              owner = 'hdfs',
-                              bin_dir = '/usr/bin',
-                              action = ['create_delayed'],
-                              )
-    self.assertResourceCalled('HdfsDirectory', '/mr-history/tmp',
-                              security_enabled = True,
-                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = '/usr/bin/kinit',
-                              mode = 0777,
-                              owner = 'mapred',
-                              group = 'hadoop',
-                              bin_dir = '/usr/bin',
-                              action = ['create_delayed'],
-                              )
-    self.assertResourceCalled('HdfsDirectory', '/mr-history/done',
-                              security_enabled = True,
-                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = '/usr/bin/kinit',
-                              mode = 01777,
-                              owner = 'mapred',
-                              group = 'hadoop',
-                              bin_dir = '/usr/bin',
-                              action = ['create_delayed'],
-                              )
-    self.assertResourceCalled('HdfsDirectory', None,
-                              security_enabled = True,
-                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = '/usr/bin/kinit',
-                              bin_dir = '/usr/bin',
-                              action = ['create'],
-                              )
+
+    self.assertResourceCalled('HdfsResource', '/app-logs',
+        security_enabled = True,
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+        user = 'hdfs',
+        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
+        kinit_path_local = '/usr/bin/kinit',
+        recursive_chmod = True,
+        owner = 'yarn',
+        group = 'hadoop',
+        hadoop_bin_dir = '/usr/bin',
+        type = 'directory',
+        action = ['create_delayed'],
+        mode = 0777,
+    )
+    self.assertResourceCalled('HdfsResource', '/mapred',
+        security_enabled = True,
+        hadoop_bin_dir = '/usr/bin',
+        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        owner = 'mapred',
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        type = 'directory',
+        action = ['create_delayed'],
+    )
+    self.assertResourceCalled('HdfsResource', '/mapred/system',
+        security_enabled = True,
+        hadoop_bin_dir = '/usr/bin',
+        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        owner = 'hdfs',
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        type = 'directory',
+        action = ['create_delayed'],
+    )
+    self.assertResourceCalled('HdfsResource', '/mr-history/tmp',
+        security_enabled = True,
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        owner = 'mapred',
+        group = 'hadoop',
+        hadoop_bin_dir = '/usr/bin',
+        type = 'directory',
+        action = ['create_delayed'],
+        mode = 0777,
+    )
+    self.assertResourceCalled('HdfsResource', '/mr-history/done',
+        security_enabled = True,
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        owner = 'mapred',
+        group = 'hadoop',
+        hadoop_bin_dir = '/usr/bin',
+        type = 'directory',
+        action = ['create_delayed'],
+        mode = 0777,
+    )
+    self.assertResourceCalled('HdfsResource', None,
+        security_enabled = True,
+        hadoop_bin_dir = '/usr/bin',
+        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        action = ['execute'],
+        hadoop_conf_dir = '/etc/hadoop/conf',
+    )
     self.assertResourceCalled('Directory', '/var/run/hadoop-yarn',
       owner = 'yarn',
       group = 'hadoop',

http://git-wip-us.apache.org/repos/asf/ambari/blob/df9e096f/ambari-server/src/test/python/stacks/2.0.6/YARN/test_mapreduce2_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_mapreduce2_client.py b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_mapreduce2_client.py
index 3c95ac2..b8f4c89 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_mapreduce2_client.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_mapreduce2_client.py
@@ -18,6 +18,7 @@ See the License for the specific language governing permissions and
 limitations under the License.
 '''
 from mock.mock import MagicMock, call, patch
+from resource_management.libraries.functions import version
 from stacks.utils.RMFTestCase import *
 import os
 
@@ -309,6 +310,7 @@ class TestMapReduce2Client(RMFTestCase):
                               )
     self.assertNoMoreResources()
 
+  @patch.object(version, "get_hdp_build_version", new=MagicMock(return_value="2.2.0.0-2041"))
   def test_upgrade(self):
     self.executeScript("2.0.6/services/YARN/package/scripts/mapreduce2_client.py",
                    classname = "MapReduce2Client",

http://git-wip-us.apache.org/repos/asf/ambari/blob/df9e096f/ambari-server/src/test/python/stacks/2.0.6/YARN/test_nodemanager.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_nodemanager.py b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_nodemanager.py
index bc9b831..03c91cb 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_nodemanager.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_nodemanager.py
@@ -112,72 +112,6 @@ class TestNodeManager(RMFTestCase):
     self.assertNoMoreResources()
 
   def assert_configure_default(self):
-    self.assertResourceCalled('HdfsDirectory', '/app-logs',
-                              security_enabled = False,
-                              keytab = UnknownConfigurationMock(),
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = "/usr/bin/kinit",
-                              recursive_chmod = True,
-                              owner = 'yarn',
-                              group = 'hadoop',
-                              action = ['create_delayed'],
-                              bin_dir = '/usr/bin',
-                              mode = 0777,
-                              )
-    self.assertResourceCalled('HdfsDirectory', '/mapred',
-                              security_enabled = False,
-                              keytab = UnknownConfigurationMock(),
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = "/usr/bin/kinit",
-                              owner = 'mapred',
-                              bin_dir = '/usr/bin',
-                              action = ['create_delayed'],
-                              )
-    self.assertResourceCalled('HdfsDirectory', '/mapred/system',
-                              security_enabled = False,
-                              keytab = UnknownConfigurationMock(),
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = "/usr/bin/kinit",
-                              owner = 'hdfs',
-                              bin_dir = '/usr/bin',
-                              action = ['create_delayed'],
-                              )
-    self.assertResourceCalled('HdfsDirectory', '/mr-history/tmp',
-                              security_enabled = False,
-                              keytab = UnknownConfigurationMock(),
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = "/usr/bin/kinit",
-                              mode = 0777,
-                              owner = 'mapred',
-                              group = 'hadoop',
-                              bin_dir = '/usr/bin',
-                              action = ['create_delayed'],
-                              )
-    self.assertResourceCalled('HdfsDirectory', '/mr-history/done',
-                              security_enabled = False,
-                              keytab = UnknownConfigurationMock(),
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = "/usr/bin/kinit",
-                              mode = 01777,
-                              owner = 'mapred',
-                              group = 'hadoop',
-                              bin_dir = '/usr/bin',
-                              action = ['create_delayed'],
-                              )
-    self.assertResourceCalled('HdfsDirectory', None,
-                              security_enabled = False,
-                              keytab = UnknownConfigurationMock(),
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = "/usr/bin/kinit",
-                              bin_dir = '/usr/bin',
-                              action = ['create'],
-                              )
     self.assertResourceCalled('Directory', '/hadoop/yarn/local',
                               owner = 'yarn',
                               group = 'hadoop',
@@ -336,72 +270,6 @@ class TestNodeManager(RMFTestCase):
                               )
 
   def assert_configure_secured(self):
-    self.assertResourceCalled('HdfsDirectory', '/app-logs',
-                              security_enabled = True,
-                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = '/usr/bin/kinit',
-                              recursive_chmod = True,
-                              owner = 'yarn',
-                              group = 'hadoop',
-                              action = ['create_delayed'],
-                              bin_dir = '/usr/bin',
-                              mode = 0777,
-                              )
-    self.assertResourceCalled('HdfsDirectory', '/mapred',
-                              security_enabled = True,
-                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = '/usr/bin/kinit',
-                              owner = 'mapred',
-                              bin_dir = '/usr/bin',
-                              action = ['create_delayed'],
-                              )
-    self.assertResourceCalled('HdfsDirectory', '/mapred/system',
-                              security_enabled = True,
-                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = '/usr/bin/kinit',
-                              owner = 'hdfs',
-                              bin_dir = '/usr/bin',
-                              action = ['create_delayed'],
-                              )
-    self.assertResourceCalled('HdfsDirectory', '/mr-history/tmp',
-                              security_enabled = True,
-                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = '/usr/bin/kinit',
-                              mode = 0777,
-                              bin_dir = '/usr/bin',
-                              owner = 'mapred',
-                              group = 'hadoop',
-                              action = ['create_delayed'],
-                              )
-    self.assertResourceCalled('HdfsDirectory', '/mr-history/done',
-                              security_enabled = True,
-                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = '/usr/bin/kinit',
-                              mode = 01777,
-                              bin_dir = '/usr/bin',
-                              owner = 'mapred',
-                              group = 'hadoop',
-                              action = ['create_delayed'],
-                              )
-    self.assertResourceCalled('HdfsDirectory', None,
-                              security_enabled = True,
-                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-                              conf_dir = '/etc/hadoop/conf',
-                              bin_dir = '/usr/bin',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = '/usr/bin/kinit',
-                              action = ['create'],
-                              )
     self.assertResourceCalled('Directory', '/hadoop/yarn/local',
                               owner = 'yarn',
                               group = 'hadoop',

http://git-wip-us.apache.org/repos/asf/ambari/blob/df9e096f/ambari-server/src/test/python/stacks/2.0.6/YARN/test_yarn_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_yarn_client.py b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_yarn_client.py
index d3b7bdb..b428b17 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_yarn_client.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_yarn_client.py
@@ -19,6 +19,7 @@ limitations under the License.
 '''
 from mock.mock import MagicMock, call, patch
 from stacks.utils.RMFTestCase import *
+from resource_management.libraries.functions import version
 from resource_management.libraries.script.script import Script
 import os
 
@@ -445,6 +446,7 @@ class TestYarnClient(RMFTestCase):
     self.assertNoMoreResources()
 
 
+  @patch.object(version, "get_hdp_build_version", new=MagicMock(return_value="2.2.0.0-2041"))
   def test_upgrade(self):
     self.executeScript("2.0.6/services/YARN/package/scripts/yarn_client.py",
                    classname = "YarnClient",

http://git-wip-us.apache.org/repos/asf/ambari/blob/df9e096f/ambari-server/src/test/python/stacks/2.0.6/configs/default.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/configs/default.json b/ambari-server/src/test/python/stacks/2.0.6/configs/default.json
index 82b485b..4882be1 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/configs/default.json
+++ b/ambari-server/src/test/python/stacks/2.0.6/configs/default.json
@@ -465,7 +465,9 @@
         "ignore_groupsusers_create": "false",
         "smokeuser": "ambari-qa",
         "kerberos_domain": "EXAMPLE.COM",
-        "user_group": "hadoop"
+        "user_group": "hadoop",
+        "tez_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/tez/",
+        "tez_tar_source" : "/usr/hdp/current/tez-client/lib/tez.tar.gz"
       },
 
       "hbase-env": {

http://git-wip-us.apache.org/repos/asf/ambari/blob/df9e096f/ambari-server/src/test/python/stacks/2.1/FALCON/test_falcon_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.1/FALCON/test_falcon_server.py b/ambari-server/src/test/python/stacks/2.1/FALCON/test_falcon_server.py
index 9b8579a..44670db 100644
--- a/ambari-server/src/test/python/stacks/2.1/FALCON/test_falcon_server.py
+++ b/ambari-server/src/test/python/stacks/2.1/FALCON/test_falcon_server.py
@@ -109,26 +109,29 @@ class TestFalconServer(RMFTestCase):
                               properties = self.getConfig()['configurations']['falcon-startup.properties'],
                               owner = 'falcon'
                               )
-    self.assertResourceCalled('HdfsDirectory', '/apps/falcon',
-                              security_enabled = False,
-                              keytab = UnknownConfigurationMock(),
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = '/usr/bin/kinit',
-                              mode = 0777,
-                              owner = 'falcon',
-                              bin_dir = '/usr/bin',
-                              action = ['create_delayed'],
-                              )
-    self.assertResourceCalled('HdfsDirectory', None,
-                              security_enabled = False,
-                              keytab = UnknownConfigurationMock(),
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = '/usr/bin/kinit',
-                              bin_dir = '/usr/bin',
-                              action = ['create'],
-                              )
+    self.assertResourceCalled('HdfsResource', '/apps/falcon',
+        security_enabled = False,
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        keytab = UnknownConfigurationMock(),
+        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        owner = 'falcon',
+        hadoop_bin_dir = '/usr/bin',
+        type = 'directory',
+        action = ['create_delayed'],
+        mode = 0777,
+    )
+    self.assertResourceCalled('HdfsResource', None,
+        security_enabled = False,
+        hadoop_bin_dir = '/usr/bin',
+        keytab = UnknownConfigurationMock(),
+        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        action = ['execute'],
+        hadoop_conf_dir = '/etc/hadoop/conf',
+    )
     self.assertResourceCalled('Directory', '/hadoop/falcon',
                               owner = 'falcon',
                               recursive = True,

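The hunk above is the shape of the whole change: each HdfsDirectory resource, which forked a `hadoop fs` process per call, becomes an HdfsResource queued with action = ['create_delayed'] and flushed by one final HdfsResource(None, action = ['execute']). A minimal, self-contained Python sketch of that batching idea (the class and its wire-up are illustrative, not Ambari's actual API):

    import json
    import subprocess

    class HdfsResourceBatch(object):
        """Queue HDFS resource requests, then apply them in a single JVM call."""
        def __init__(self, jar_path, json_path, namenode):
            self.jar_path = jar_path
            self.json_path = json_path
            self.namenode = namenode
            self.pending = []

        def create_delayed(self, target, res_type, **attrs):
            # Record the request instead of forking `hadoop fs` right away.
            request = {"target": target, "type": res_type, "action": "create"}
            request.update(attrs)
            self.pending.append(request)

        def execute(self):
            # One process handles every queued request; batching like this is
            # where the ~6min deploy-time saving in AMBARI-8932 comes from.
            with open(self.json_path, "w") as out:
                json.dump(self.pending, out)
            subprocess.check_call(["hadoop", "jar", self.jar_path,
                                   self.json_path, self.namenode])
            self.pending = []

Call sites then look like the Falcon test expects: first create_delayed('/apps/falcon', 'directory', owner='falcon', ...), then a single execute().
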
http://git-wip-us.apache.org/repos/asf/ambari/blob/df9e096f/ambari-server/src/test/python/unitTests.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/unitTests.py b/ambari-server/src/test/python/unitTests.py
index a4fd2d2..a9b233e 100644
--- a/ambari-server/src/test/python/unitTests.py
+++ b/ambari-server/src/test/python/unitTests.py
@@ -27,7 +27,8 @@ import shutil
 
 #excluded directories with non-test stuff from stack and service scanning,
 #also we can add service or stack to skip here
-STACK_EXCLUDE = ["utils"]
+# FIXME: remove this once the 1.3.2 stack is deleted (no longer supported)
+STACK_EXCLUDE = ["utils", "1.3.2"]
 SERVICE_EXCLUDE = ["configs"]
 
 TEST_MASK = '[Tt]est*.py'

http://git-wip-us.apache.org/repos/asf/ambari/blob/df9e096f/contrib/fast-hdfs-resource/dependency-reduced-pom.xml
----------------------------------------------------------------------
diff --git a/contrib/fast-hdfs-resource/dependency-reduced-pom.xml b/contrib/fast-hdfs-resource/dependency-reduced-pom.xml
new file mode 100644
index 0000000..c252f2e
--- /dev/null
+++ b/contrib/fast-hdfs-resource/dependency-reduced-pom.xml
@@ -0,0 +1,42 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <groupId>org.apache.ambari</groupId>
+  <artifactId>fast-hdfs-resource</artifactId>
+  <name>fast-hdfs-resource</name>
+  <version>0.0.1-SNAPSHOT</version>
+  <url>http://maven.apache.org</url>
+  <build>
+    <plugins>
+      <plugin>
+        <artifactId>maven-shade-plugin</artifactId>
+        <version>2.3</version>
+        <executions>
+          <execution>
+            <phase>package</phase>
+            <goals>
+              <goal>shade</goal>
+            </goals>
+            <configuration>
+              <transformers>
+                <transformer>
+                  <mainClass>org.apache.ambari.fast_hdfs_resource.Runner</mainClass>
+                </transformer>
+              </transformers>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+    </plugins>
+  </build>
+  <repositories>
+    <repository>
+      <id>hdp.internal</id>
+      <url>http://repo1.maven.org/maven2</url>
+    </repository>
+  </repositories>
+  <properties>
+    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+  </properties>
+</project>
+

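This dependency-reduced POM is the shade plugin's record of the uber-jar build: dependencies are bundled and org.apache.ambari.fast_hdfs_resource.Runner is written into the manifest as the entry point, so the agent can launch the jar with plain `hadoop jar`. A hedged sketch of that launch from Python (the jar and JSON paths are the ones the Oozie smoke test exports below):

    import subprocess

    JAR_PATH = "/var/lib/ambari-agent/lib/fast-hdfs-resource.jar"
    JSON_PATH = "/var/lib/ambari-agent/data/hdfs_resources.json"
    NAMENODE = "hdfs://c6401.ambari.apache.org:8020"  # example value from the tests

    # `hadoop jar` picks up the Main-Class the shade transformer recorded,
    # so no class name needs to be passed on the command line.
    subprocess.check_call(["hadoop", "--config", "/etc/hadoop/conf",
                           "jar", JAR_PATH, JSON_PATH, NAMENODE])
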
http://git-wip-us.apache.org/repos/asf/ambari/blob/df9e096f/contrib/fast-hdfs-resource/src/main/java/org/apache/ambari/fast_hdfs_resource/Resource.java
----------------------------------------------------------------------
diff --git a/contrib/fast-hdfs-resource/src/main/java/org/apache/ambari/fast_hdfs_resource/Resource.java b/contrib/fast-hdfs-resource/src/main/java/org/apache/ambari/fast_hdfs_resource/Resource.java
index 691bdc6..893574a 100644
--- a/contrib/fast-hdfs-resource/src/main/java/org/apache/ambari/fast_hdfs_resource/Resource.java
+++ b/contrib/fast-hdfs-resource/src/main/java/org/apache/ambari/fast_hdfs_resource/Resource.java
@@ -152,15 +152,15 @@ public class Resource {
     if (dfs.isFile(new Path(resource.getTarget()))
         && !"file".equals(resource.getType()))
       throw new IllegalArgumentException(
-          "Cannot create a file " + resource.getTarget() +
-              " because directory is present on the given path.");
+          "Cannot create a directory " + resource.getTarget() +
+              " because file is present on the given path.");
 
     // Check consistency for ("type":"directory" == directory in hadoop)
     if (dfs.isDirectory(new Path(resource.getTarget()))
         && !"directory".equals(resource.getType()))
       throw new IllegalArgumentException(
-          "Cannot create a directory " + resource.getTarget() +
-              " because file is present on the given path.");
+          "Cannot create a file " + resource.getTarget() +
+              " because directory is present on the given path.");
 
   }
 

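The two exception messages were swapped in the original: the branch that detects an existing file complained about a directory, and vice versa. In corrected form the check reads as follows, with the HDFS probes stubbed out as booleans (a Python paraphrase, not the Java API):

    def check_type_consistency(target, res_type, is_file, is_dir):
        """Fail if the requested type collides with what already exists."""
        if is_file and res_type != "file":
            raise ValueError("Cannot create a directory " + target +
                             " because file is present on the given path.")
        if is_dir and res_type != "directory":
            raise ValueError("Cannot create a file " + target +
                             " because directory is present on the given path.")
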

[2/3] ambari git commit: AMBARI-8932. Creating hdfs directories on deploy takes too long, Part 2, reduces deploy time by ~6min (aonishuk)

Posted by ao...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/df9e096f/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat.py
index c02bf74..846bab7 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat.py
@@ -20,37 +20,15 @@ Ambari Agent
 """
 import sys
 import os.path
-import glob
 
 from resource_management import *
 from resource_management.core.resources.system import Execute
 from resource_management.libraries.functions.version import compare_versions
-from resource_management.libraries.functions.dynamic_variable_interpretation import copy_tarballs_to_hdfs
 
 
-def webhcat():
+def webhcat(env):
   import params
 
-  if params.hdp_stack_version != "" and compare_versions(params.hdp_stack_version, "2.2.0.0") < 0:
-    params.HdfsDirectory(params.webhcat_apps_dir,
-                         action="create_delayed",
-                         owner=params.webhcat_user,
-                         mode=0755
-    )
-  
-  if params.hcat_hdfs_user_dir != params.webhcat_hdfs_user_dir:
-    params.HdfsDirectory(params.hcat_hdfs_user_dir,
-                         action="create_delayed",
-                         owner=params.hcat_user,
-                         mode=params.hcat_hdfs_user_mode
-    )
-  params.HdfsDirectory(params.webhcat_hdfs_user_dir,
-                       action="create_delayed",
-                       owner=params.webhcat_user,
-                       mode=params.webhcat_hdfs_user_mode
-  )
-  params.HdfsDirectory(None, action="create")
-
   Directory(params.templeton_pid_dir,
             owner=params.webhcat_user,
             mode=0755,
@@ -79,55 +57,6 @@ def webhcat():
             path='/bin'
     )
 
-  # TODO, these checks that are specific to HDP 2.2 and greater should really be in a script specific to that stack.
-  if params.hdp_stack_version != "" and compare_versions(params.hdp_stack_version, "2.2.0.0") >= 0:
-    copy_tarballs_to_hdfs('hive', params.webhcat_user, params.hdfs_user, params.user_group)
-    copy_tarballs_to_hdfs('pig', params.webhcat_user, params.hdfs_user, params.user_group)
-    copy_tarballs_to_hdfs('hadoop-streaming', params.webhcat_user, params.hdfs_user, params.user_group)
-    copy_tarballs_to_hdfs('sqoop', params.webhcat_user, params.hdfs_user, params.user_group)
-  else:
-    CopyFromLocal(params.hadoop_streeming_jars,
-                  owner=params.webhcat_user,
-                  mode=0755,
-                  dest_dir=params.webhcat_apps_dir,
-                  kinnit_if_needed=kinit_if_needed,
-                  hdfs_user=params.hdfs_user,
-                  hadoop_bin_dir=params.hadoop_bin_dir,
-                  hadoop_conf_dir=params.hadoop_conf_dir
-    )
-
-    if (os.path.isfile(params.pig_tar_file)):
-      CopyFromLocal(params.pig_tar_file,
-                    owner=params.webhcat_user,
-                    mode=0755,
-                    dest_dir=params.webhcat_apps_dir,
-                    kinnit_if_needed=kinit_if_needed,
-                    hdfs_user=params.hdfs_user,
-                    hadoop_bin_dir=params.hadoop_bin_dir,
-                    hadoop_conf_dir=params.hadoop_conf_dir
-      )
-
-    CopyFromLocal(params.hive_tar_file,
-                  owner=params.webhcat_user,
-                  mode=0755,
-                  dest_dir=params.webhcat_apps_dir,
-                  kinnit_if_needed=kinit_if_needed,
-                  hdfs_user=params.hdfs_user,
-                  hadoop_bin_dir=params.hadoop_bin_dir,
-                  hadoop_conf_dir=params.hadoop_conf_dir
-    )
-
-    if (len(glob.glob(params.sqoop_tar_file)) > 0):
-      CopyFromLocal(params.sqoop_tar_file,
-                    owner=params.webhcat_user,
-                    mode=0755,
-                    dest_dir=params.webhcat_apps_dir,
-                    kinnit_if_needed=kinit_if_needed,
-                    hdfs_user=params.hdfs_user,
-                    hadoop_bin_dir=params.hadoop_bin_dir,
-                    hadoop_conf_dir=params.hadoop_conf_dir
-      )
-
   XmlConfig("webhcat-site.xml",
             conf_dir=params.config_dir,
             configurations=params.config['configurations']['webhcat-site'],

http://git-wip-us.apache.org/repos/asf/ambari/blob/df9e096f/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_server.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_server.py
index f1f9f37..d24a1da 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_server.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_server.py
@@ -31,7 +31,7 @@ class WebHCatServer(Script):
   def configure(self, env):
     import params
     env.set_params(params)
-    webhcat()
+    webhcat(env)
 
 
   def start(self, env, rolling_restart=False):

http://git-wip-us.apache.org/repos/asf/ambari/blob/df9e096f/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/files/oozieSmoke2.sh
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/files/oozieSmoke2.sh b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/files/oozieSmoke2.sh
index 30d878c..b39dc8c 100644
--- a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/files/oozieSmoke2.sh
+++ b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/files/oozieSmoke2.sh
@@ -70,6 +70,8 @@ export OOZIE_EXIT_CODE=0
 export JOBTRACKER=`getValueFromField ${hadoop_conf_dir}/yarn-site.xml yarn.resourcemanager.address`
 export NAMENODE=`getValueFromField ${hadoop_conf_dir}/core-site.xml fs.defaultFS`
 export OOZIE_SERVER=`getValueFromField ${oozie_conf_dir}/oozie-site.xml oozie.base.url | tr '[:upper:]' '[:lower:]'`
+export JSON_PATH='/var/lib/ambari-agent/data/hdfs_resources.json'
+export JAR_PATH='/var/lib/ambari-agent/lib/fast-hdfs-resource.jar'
 
 if [ "$os_family" == "ubuntu" ] ; then
   LIST_PACKAGE_FILES_CMD='dpkg-query -L'
@@ -100,10 +102,33 @@ else
   kinitcmd=""
 fi
 
-sudo su ${smoke_test_user} -s /bin/bash - -c "${hadoop_bin_dir}/hdfs --config ${hadoop_conf_dir} dfs -rm -r examples"
-sudo su ${smoke_test_user} -s /bin/bash - -c "${hadoop_bin_dir}/hdfs --config ${hadoop_conf_dir} dfs -rm -r input-data"
-sudo su ${smoke_test_user} -s /bin/bash - -c "${hadoop_bin_dir}/hdfs --config ${hadoop_conf_dir} dfs -copyFromLocal $OOZIE_EXAMPLES_DIR/examples examples"
-sudo su ${smoke_test_user} -s /bin/bash - -c "${hadoop_bin_dir}/hdfs --config ${hadoop_conf_dir} dfs -copyFromLocal $OOZIE_EXAMPLES_DIR/examples/input-data input-data"
+cat >$JSON_PATH<<EOF
+[{
+	"target":"examples",
+	"type":"directory",
+	"action":"delete"
+},
+{
+	"target":"input-data",
+	"type":"directory",
+	"action":"delete"
+},
+{
+	"target":"examples",
+	"type":"directory",
+	"action":"create",
+	"source":"$OOZIE_EXAMPLES_DIR/examples"
+},
+{
+	"target":"input-data",
+	"type":"directory",
+	"action":"create",
+	"source":"$OOZIE_EXAMPLES_DIR/examples/input-data"
+}]
+EOF
+
+echo "About to run: hadoop --config ${hadoop_conf_dir} jar ${JAR_PATH} ${JSON_PATH} ${NAMENODE}"
+sudo su ${smoke_test_user} -s /bin/bash - -c "hadoop --config ${hadoop_conf_dir} jar ${JAR_PATH} ${JSON_PATH} ${NAMENODE}"
 
 cmd="${kinitcmd}source ${oozie_conf_dir}/oozie-env.sh ; ${oozie_bin_dir}/oozie -Doozie.auth.token.cache=false job -oozie $OOZIE_SERVER -config $OOZIE_EXAMPLES_DIR/examples/apps/map-reduce/job.properties  -run"
 echo $cmd

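The heredoc is the whole client-side protocol for fast-hdfs-resource: a JSON array of {target, type, action[, source]} records, applied in order, replacing four separate `hdfs dfs` invocations with one. The same descriptor can be generated from Python with nothing but the stdlib (the examples dir is a placeholder for $OOZIE_EXAMPLES_DIR, whose value is set elsewhere in the script):

    import json

    oozie_examples_dir = "/path/to/oozie/examples"  # stands in for $OOZIE_EXAMPLES_DIR

    resources = [
        {"target": "examples",   "type": "directory", "action": "delete"},
        {"target": "input-data", "type": "directory", "action": "delete"},
        {"target": "examples",   "type": "directory", "action": "create",
         "source": oozie_examples_dir + "/examples"},
        {"target": "input-data", "type": "directory", "action": "create",
         "source": oozie_examples_dir + "/examples/input-data"},
    ]

    with open("/var/lib/ambari-agent/data/hdfs_resources.json", "w") as out:
        json.dump(resources, out, indent=2)
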
http://git-wip-us.apache.org/repos/asf/ambari/blob/df9e096f/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie.py b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie.py
index dbde3ab..da69a3c 100644
--- a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie.py
+++ b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie.py
@@ -26,11 +26,13 @@ def oozie(is_server=False # TODO: see if we can remove this
   import params
 
   if is_server:
-    params.HdfsDirectory(params.oozie_hdfs_user_dir,
-                         action="create",
+    params.HdfsResource(params.oozie_hdfs_user_dir,
+                         type="directory",
+                         action="create_delayed",
                          owner=params.oozie_user,
                          mode=params.oozie_hdfs_user_mode
     )
+    params.HdfsResource(None, action="execute")
   Directory(params.conf_dir,
              recursive = True,
              owner = params.oozie_user,

http://git-wip-us.apache.org/repos/asf/ambari/blob/df9e096f/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params.py
index 413ceb3..4c1e68e 100644
--- a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params.py
@@ -146,16 +146,17 @@ hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
 hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
 hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
 import functools
-#create partial functions with common arguments for every HdfsDirectory call
-#to create hdfs directory we need to call params.HdfsDirectory in code
-HdfsDirectory = functools.partial(
-  HdfsDirectory,
-  conf_dir=hadoop_conf_dir,
-  hdfs_user=hdfs_user,
+#create partial functions with common arguments for every HdfsResource call
+#to create hdfs directory we need to call params.HdfsResource in code
+HdfsResource = functools.partial(
+  HdfsResource,
+  user=hdfs_user,
   security_enabled = security_enabled,
   keytab = hdfs_user_keytab,
   kinit_path_local = kinit_path_local,
-  bin_dir = hadoop_bin_dir
+  hadoop_fs=fs_root,
+  hadoop_bin_dir = hadoop_bin_dir,
+  hadoop_conf_dir = hadoop_conf_dir
 )
 
 #LZO support

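Every params.py in this patch repeats the same idiom: functools.partial pre-binds the cluster-wide keyword arguments (user, keytab, hadoop_fs, and so on) once, so each call site only passes what varies. A stdlib-only illustration of why that works (hdfs_resource here is a stand-in that just reports its arguments, not the real resource class):

    import functools

    def hdfs_resource(target, user=None, hadoop_fs=None, type=None,
                      action=None, **attrs):
        # Stand-in that reports what the real resource would do.
        print("%s %s %s as %s on %s" % (action, type or "-", target, user, hadoop_fs))

    # Bind the arguments every call shares, exactly as params.py does.
    HdfsResource = functools.partial(hdfs_resource,
                                     user="hdfs",
                                     hadoop_fs="hdfs://c6401.ambari.apache.org:8020")

    HdfsResource("/user/oozie", type="directory", action="create_delayed")
    HdfsResource(None, action="execute")
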
http://git-wip-us.apache.org/repos/asf/ambari/blob/df9e096f/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/params.py
index d7bf5eb..8bb143a 100644
--- a/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/params.py
@@ -21,6 +21,7 @@ Ambari Agent
 
 from resource_management.libraries.functions.version import format_hdp_stack_version, compare_versions
 from resource_management import *
+import os
 
 # server configurations
 config = Script.get_config()
@@ -37,6 +38,9 @@ if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0:
   hadoop_bin_dir = "/usr/hdp/current/hadoop-client/bin"
   hadoop_home = '/usr/hdp/current/hadoop-client'
   pig_bin_dir = '/usr/hdp/current/pig-client/bin'
+
+  tez_tar_source = config['configurations']['cluster-env']['tez_tar_source']
+  tez_tar_destination = config['configurations']['cluster-env']['tez_tar_destination_folder'] + "/" + os.path.basename(tez_tar_source)
 else:
   hadoop_bin_dir = "/usr/bin"
   hadoop_home = '/usr'
@@ -53,6 +57,7 @@ security_enabled = config['configurations']['cluster-env']['security_enabled']
 smoke_user_keytab = config['configurations']['cluster-env']['smokeuser_keytab']
 kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
 pig_env_sh_template = config['configurations']['pig-env']['content']
+fs_root = config['configurations']['core-site']['fs.defaultFS']
 
 # not supporting 32 bit jdk.
 java64_home = config['hostLevelParams']['java_home']
@@ -62,14 +67,15 @@ pig_properties = config['configurations']['pig-properties']['content']
 log4j_props = config['configurations']['pig-log4j']['content']
 
 import functools
-#create partial functions with common arguments for every HdfsDirectory call
-#to create hdfs directory we need to call params.HdfsDirectory in code
-HdfsDirectory = functools.partial(
-  HdfsDirectory,
-  conf_dir=hadoop_conf_dir,
-  hdfs_user=hdfs_principal_name if security_enabled else hdfs_user,
+#create partial functions with common arguments for every HdfsResource call
+#to create hdfs directory we need to call params.HdfsResource in code
+HdfsResource = functools.partial(
+  HdfsResource,
+  user=hdfs_principal_name if security_enabled else hdfs_user,
   security_enabled = security_enabled,
   keytab = hdfs_user_keytab,
   kinit_path_local = kinit_path_local,
-  bin_dir = hadoop_bin_dir
+  hadoop_fs=fs_root,
+  hadoop_bin_dir = hadoop_bin_dir,
+  hadoop_conf_dir = hadoop_conf_dir
 )

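The destination path is built by joining the configured folder with the basename of the source tarball; with the values from the test config above, the join produces a doubled slash, which Hadoop's Path handling tolerates:

    import os

    tez_tar_source = "/usr/hdp/current/tez-client/lib/tez.tar.gz"
    tez_tar_destination_folder = "hdfs:///hdp/apps/{{ hdp_stack_version }}/tez/"

    tez_tar_destination = (tez_tar_destination_folder + "/" +
                           os.path.basename(tez_tar_source))
    print(tez_tar_destination)
    # hdfs:///hdp/apps/{{ hdp_stack_version }}/tez//tez.tar.gz
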
http://git-wip-us.apache.org/repos/asf/ambari/blob/df9e096f/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/service_check.py b/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/service_check.py
index a0e04ab..b209aba 100644
--- a/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/service_check.py
+++ b/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/service_check.py
@@ -20,32 +20,34 @@ Ambari Agent
 """
 
 from resource_management import *
-from resource_management.libraries.functions.dynamic_variable_interpretation import copy_tarballs_to_hdfs
+from resource_management.libraries.functions.version import compare_versions
 
 class PigServiceCheck(Script):
   def service_check(self, env):
     import params
     env.set_params(params)
 
-    input_file = 'passwd'
-    output_file = "pigsmoke.out"
+    input_file = format('/user/{smokeuser}/passwd')
+    output_dir = format('/user/{smokeuser}/pigsmoke.out')
 
-    cleanup_cmd = format("dfs -rmr {output_file} {input_file}")
     #cleanup put below to handle retries; if retrying there will be a stale file that needs cleanup; exit code is fn of second command
-    create_file_cmd = format("{cleanup_cmd}; hadoop --config {hadoop_conf_dir} dfs -put /etc/passwd {input_file} ") #TODO: inconsistent that second command needs hadoop
-    test_cmd = format("fs -test -e {output_file}")
-
-    ExecuteHadoop( create_file_cmd,
-      tries     = 3,
-      try_sleep = 5,
-      user      = params.smokeuser,
-      conf_dir = params.hadoop_conf_dir,
-      # for kinit run
-      keytab = params.smoke_user_keytab,
-      security_enabled = params.security_enabled,
-      kinit_path_local = params.kinit_path_local,
-      bin_dir = params.hadoop_bin_dir
+    params.HdfsResource(output_dir,
+                        type="directory",
+                        action="delete_delayed",
+                        user=params.smokeuser,
+                        )
+    params.HdfsResource(input_file,
+                        type="file",
+                        action="delete_delayed",
+                        user=params.smokeuser,
+                        )
+    params.HdfsResource(input_file,
+                        type="file",
+                        source="/etc/passwd",
+                        action="create_delayed",
+                        user=params.smokeuser,
     )
+    params.HdfsResource(None, action="execute")
 
     File( format("{tmp_dir}/pigSmoke.sh"),
       content = StaticFile("pigSmoke.sh"),
@@ -60,28 +62,39 @@ class PigServiceCheck(Script):
       user      = params.smokeuser
     )
 
+    test_cmd = format("fs -test -e {output_dir}")
     ExecuteHadoop( test_cmd,
-      user      = params.smokeuser,
+      user = params.smokeuser,
       conf_dir = params.hadoop_conf_dir,
       bin_dir = params.hadoop_bin_dir
     )
 
     if params.hdp_stack_version != "" and compare_versions(params.hdp_stack_version, '2.2') >= 0:
       # cleanup results from previous test
-      ExecuteHadoop( create_file_cmd,
-        tries     = 3,
-        try_sleep = 5,
-        user      = params.smokeuser,
-        conf_dir = params.hadoop_conf_dir,
-        # for kinit run
-        keytab = params.smoke_user_keytab,
-        security_enabled = params.security_enabled,
-        kinit_path_local = params.kinit_path_local,
-        bin_dir = params.hadoop_bin_dir
+      params.HdfsResource(output_dir,
+                          type="directory",
+                          action="delete_delayed",
+                          user=params.smokeuser,
+                          )
+      params.HdfsResource(input_file,
+                          type="file",
+                          action="delete_delayed",
+                          user=params.smokeuser,
+                          )
+      params.HdfsResource(input_file,
+                          type="file",
+                          source="/etc/passwd",
+                          action="create_delayed",
+                          user=params.smokeuser,
       )
-
-      # Check for Pig-on-Tez
-      copy_tarballs_to_hdfs('tez', params.smokeuser, params.hdfs_user, params.user_group)
+      params.HdfsResource(InlineTemplate(params.tez_tar_destination).get_content(),
+                          type="file",
+                          action="create_delayed",
+                          source=params.tez_tar_source,
+                          group=params.user_group,
+                          owner=params.hdfs_user
+      )
+      params.HdfsResource(None, action="execute")
 
       if params.security_enabled:
         kinit_cmd = format("{kinit_path_local} -kt {smoke_user_keytab} {smokeuser};")

http://git-wip-us.apache.org/repos/asf/ambari/blob/df9e096f/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params.py b/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params.py
index 13ab77f..d839acdb 100644
--- a/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params.py
@@ -48,20 +48,22 @@ config_dir = format("{config_dir_prefix}/conf")
 
 hadoop_home = '/usr'
 java64_home = config['hostLevelParams']['java_home']
+fs_root = config['configurations']['core-site']['fs.defaultFS']
 
 tez_user = config['configurations']['tez-env']['tez_user']
 user_group = config['configurations']['cluster-env']['user_group']
 tez_env_sh_template = config['configurations']['tez-env']['content']
 
 import functools
-# Create partial functions with common arguments for every HdfsDirectory call
-# to create hdfs directory we need to call params.HdfsDirectory in code
-HdfsDirectory = functools.partial(
-  HdfsDirectory,
-  conf_dir=hadoop_conf_dir,
-  hdfs_user=hdfs_principal_name if security_enabled else hdfs_user,
+# Create partial functions with common arguments for every HdfsResource call
+# to create hdfs directory we need to call params.HdfsResource in code
+HdfsResource = functools.partial(
+  HdfsResource,
+  user=hdfs_principal_name if security_enabled else hdfs_user,
   security_enabled=security_enabled,
   keytab=hdfs_user_keytab,
   kinit_path_local=kinit_path_local,
-  bin_dir=hadoop_bin_dir
+  hadoop_fs=fs_root,
+  hadoop_bin_dir = hadoop_bin_dir,
+  hadoop_conf_dir = hadoop_conf_dir
 )

http://git-wip-us.apache.org/repos/asf/ambari/blob/df9e096f/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/historyserver.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/historyserver.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/historyserver.py
index d2b6ee3..6e173d0 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/historyserver.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/historyserver.py
@@ -20,7 +20,6 @@ Ambari Agent
 """
 
 from resource_management import *
-from resource_management.libraries.functions.dynamic_variable_interpretation import copy_tarballs_to_hdfs
 from resource_management.libraries.functions.version import compare_versions, format_hdp_stack_version
 from resource_management.libraries.functions.format import format
 
@@ -43,13 +42,32 @@ class HistoryServer(Script):
 
     if params.version and compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0:
       Execute(format("hdp-select set hadoop-mapreduce-historyserver {version}"))
-      copy_tarballs_to_hdfs('mapreduce', params.mapred_user, params.hdfs_user, params.user_group)
+
+      params.HdfsResource(InlineTemplate(params.mapreduce_tar_destination).get_content(),
+                          type="file",
+                          action="create_delayed",
+                          source=params.mapreduce_tar_source,
+                          owner=params.hdfs_user,
+                          group=params.user_group,
+                          mode=0444,
+      )
+      params.HdfsResource(None, action="execute")
 
   def start(self, env, rolling_restart=False):
     import params
     env.set_params(params)
     self.configure(env) # FOR SECURITY
-    copy_tarballs_to_hdfs('mapreduce', params.mapred_user, params.hdfs_user, params.user_group)
+    if params.version and compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0:
+
+      params.HdfsResource(InlineTemplate(params.mapreduce_tar_destination).get_content(),
+                          type="file",
+                          action="create_delayed",
+                          source=params.mapreduce_tar_source,
+                          owner=params.hdfs_user,
+                          group=params.user_group,
+                          mode=0444,
+      )
+      params.HdfsResource(None, action="execute")
     service('historyserver', action='start', serviceName='mapreduce')
 
   def stop(self, env, rolling_restart=False):

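Note that mapreduce_tar_destination still carries the {{ hdp_stack_version }} placeholder at this point; InlineTemplate(...).get_content() renders it against the live parameters before the path is handed to HdfsResource. A rough equivalent using jinja2 as a stand-in for InlineTemplate (the template string is modeled on the tez value in the test config, and the version is the build version the tests mock):

    from jinja2 import Template

    hdp_stack_version = "2.2.0.0-2041"  # e.g. what get_hdp_build_version returns
    destination = "hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/mapreduce.tar.gz"

    print(Template(destination).render(hdp_stack_version=hdp_stack_version))
    # hdfs:///hdp/apps/2.2.0.0-2041/mapreduce/mapreduce.tar.gz
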
http://git-wip-us.apache.org/repos/asf/ambari/blob/df9e096f/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/params.py
index d9c73bf..f6c3ce8 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/params.py
@@ -30,7 +30,10 @@ tmp_dir = Script.get_tmp_dir()
 
 # This is expected to be of the form #.#.#.#
 stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
-hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)
+hdp_stack_version_major = format_hdp_stack_version(stack_version_unformatted)
+
+# this is not available during the INSTALL action because hdp-select is not available
+hdp_stack_version = version.get_hdp_build_version(hdp_stack_version_major)
 
 # New Cluster Stack Version that is defined during the RESTART of a Rolling Upgrade
 version = default("/commandParams/version", None)
@@ -38,7 +41,7 @@ version = default("/commandParams/version", None)
 hostname = config['hostname']
 
 #hadoop params
-if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0:
+if hdp_stack_version_major != "" and compare_versions(hdp_stack_version_major, '2.2') >= 0:
   yarn_role_root = "hadoop-yarn-client"
   mapred_role_root = "hadoop-mapreduce-client"
 
@@ -66,6 +69,9 @@ if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0:
   hadoop_yarn_home            = format("/usr/hdp/current/{yarn_role_root}")
   yarn_bin                    = format("/usr/hdp/current/{yarn_role_root}/sbin")
   yarn_container_bin          = format("/usr/hdp/current/{yarn_role_root}/bin")
+
+  mapreduce_tar_source = config['configurations']['cluster-env']['mapreduce_tar_source']
+  mapreduce_tar_destination = config['configurations']['cluster-env']['mapreduce_tar_destination_folder'] + "/" + os.path.basename(mapreduce_tar_source)
 else:
   hadoop_libexec_dir = "/usr/lib/hadoop/libexec"
   hadoop_bin = "/usr/lib/hadoop/sbin"
@@ -183,17 +189,21 @@ jobhistory_heapsize = default("/configurations/mapred-env/jobhistory_heapsize",
 #for create_hdfs_directory
 hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
 hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
+
+fs_root = config['configurations']['core-site']['fs.defaultFS']
+
 import functools
-#create partial functions with common arguments for every HdfsDirectory call
-#to create hdfs directory we need to call params.HdfsDirectory in code
-HdfsDirectory = functools.partial(
-  HdfsDirectory,
-  conf_dir=hadoop_conf_dir,
-  hdfs_user=hdfs_user,
+#create partial functions with common arguments for every HdfsResource call
+#to create hdfs directory we need to call params.HdfsResource in code
+HdfsResource = functools.partial(
+  HdfsResource,
+  user=hdfs_user,
   security_enabled = security_enabled,
   keytab = hdfs_user_keytab,
   kinit_path_local = kinit_path_local,
-  bin_dir = hadoop_bin_dir
+  hadoop_fs=fs_root,
+  hadoop_bin_dir = hadoop_bin_dir,
+  hadoop_conf_dir = hadoop_conf_dir
 )
 update_exclude_file_only = default("/commandParams/update_exclude_file_only",False)
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/df9e096f/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/yarn.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/yarn.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/yarn.py
index cf0d211..b784d8d 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/yarn.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/yarn.py
@@ -28,37 +28,42 @@ def yarn(name = None):
   import params
 
 
-  if name in ["nodemanager","historyserver"]:
+  if name == "historyserver":
     if params.yarn_log_aggregation_enabled:
-      params.HdfsDirectory(params.yarn_nm_app_log_dir,
+      params.HdfsResource(params.yarn_nm_app_log_dir,
+                           type="directory",
                            action="create_delayed",
                            owner=params.yarn_user,
                            group=params.user_group,
                            mode=0777,
                            recursive_chmod=True
       )
-    params.HdfsDirectory("/mapred",
+    params.HdfsResource("/mapred",
+                         type="directory",
                          action="create_delayed",
                          owner=params.mapred_user
     )
-    params.HdfsDirectory("/mapred/system",
+    params.HdfsResource("/mapred/system",
+                         type="directory",
                          action="create_delayed",
                          owner=params.hdfs_user
     )
-    params.HdfsDirectory(params.mapreduce_jobhistory_intermediate_done_dir,
+    params.HdfsResource(params.mapreduce_jobhistory_intermediate_done_dir,
+                         type="directory",
                          action="create_delayed",
                          owner=params.mapred_user,
                          group=params.user_group,
                          mode=0777
     )
 
-    params.HdfsDirectory(params.mapreduce_jobhistory_done_dir,
+    params.HdfsResource(params.mapreduce_jobhistory_done_dir,
+                         type="directory",
                          action="create_delayed",
                          owner=params.mapred_user,
                          group=params.user_group,
-                         mode=01777
+                         mode=0777
     )
-    params.HdfsDirectory(None, action="create")
+    params.HdfsResource(None, action="execute")
 
   if name == "nodemanager":
     Directory(params.nm_local_dirs.split(',') + params.nm_log_dirs.split(','),
@@ -129,12 +134,14 @@ def yarn(name = None):
        group=params.user_group
     )
     if params.node_labels_dir:
-      params.HdfsDirectory(params.node_labels_dir,
-                           action="create",
+      params.HdfsResource(params.node_labels_dir,
+                           type="directory",
+                           action="create_delayed",
                            owner=params.yarn_user,
                            group=params.user_group,
                            mode=0700
       )
+      params.HdfsResource(None, action="execute")
   elif name == 'apptimelineserver':
     Directory(params.ats_leveldb_dir,
        owner=params.yarn_user,

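One behavioral detail hides in this hunk: the job-history done dir changes from mode=01777 to mode=0777, so the leading octal digit carrying the sticky bit is dropped along with the rename to HdfsResource. For reference, the bit in question:

    import stat

    old_mode, new_mode = 0o1777, 0o777
    print(oct(old_mode & stat.S_ISVTX))  # 0o1000 -- sticky bit was set
    print(oct(new_mode & stat.S_ISVTX))  # 0       -- sticky bit is gone
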
http://git-wip-us.apache.org/repos/asf/ambari/blob/df9e096f/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_master.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_master.py b/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_master.py
index 5420bc7..f0d6331 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_master.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_master.py
@@ -271,36 +271,42 @@ class TestHBaseMaster(RMFTestCase):
                               owner='hbase',
                               content='log4jproperties\nline2'
     )
-    self.assertResourceCalled('HdfsDirectory', 'hdfs://c6401.ambari.apache.org:8020/apps/hbase/data',
-                              security_enabled = False,
-                              keytab = UnknownConfigurationMock(),
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = "/usr/bin/kinit",
-                              owner = 'hbase',
-                              bin_dir = '/usr/bin',
-                              action = ['create_delayed'],
-                              )
-    self.assertResourceCalled('HdfsDirectory', '/apps/hbase/staging',
-                              security_enabled = False,
-                              keytab = UnknownConfigurationMock(),
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = "/usr/bin/kinit",
-                              mode = 0711,
-                              owner = 'hbase',
-                              bin_dir = '/usr/bin',
-                              action = ['create_delayed'],
-                              )
-    self.assertResourceCalled('HdfsDirectory', None,
-                              security_enabled = False,
-                              keytab = UnknownConfigurationMock(),
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = "/usr/bin/kinit",
-                              bin_dir = '/usr/bin',
-                              action = ['create'],
-                              )
+
+    self.assertResourceCalled('HdfsResource', 'hdfs://c6401.ambari.apache.org:8020/apps/hbase/data',
+        security_enabled = False,
+        hadoop_bin_dir = '/usr/bin',
+        keytab = UnknownConfigurationMock(),
+        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        owner = 'hbase',
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        type = 'directory',
+        action = ['create_delayed'],
+    )
+    self.assertResourceCalled('HdfsResource', '/apps/hbase/staging',
+        security_enabled = False,
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        keytab = UnknownConfigurationMock(),
+        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        owner = 'hbase',
+        hadoop_bin_dir = '/usr/bin',
+        type = 'directory',
+        action = ['create_delayed'],
+        mode = 0711,
+    )
+    self.assertResourceCalled('HdfsResource', None,
+        security_enabled = False,
+        hadoop_bin_dir = '/usr/bin',
+        keytab = UnknownConfigurationMock(),
+        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        action = ['execute'],
+        hadoop_conf_dir = '/etc/hadoop/conf',
+    )
 
   def assert_configure_secured(self):
     self.assertResourceCalled('Directory', '/etc/hbase',
@@ -385,36 +391,41 @@ class TestHBaseMaster(RMFTestCase):
                               owner='hbase',
                               content='log4jproperties\nline2'
     )
-    self.assertResourceCalled('HdfsDirectory', 'hdfs://c6401.ambari.apache.org:8020/apps/hbase/data',
-                              security_enabled = True,
-                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = '/usr/bin/kinit',
-                              owner = 'hbase',
-                              bin_dir = '/usr/bin',
-                              action = ['create_delayed'],
-                              )
-    self.assertResourceCalled('HdfsDirectory', '/apps/hbase/staging',
-                              security_enabled = True,
-                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = '/usr/bin/kinit',
-                              mode = 0711,
-                              owner = 'hbase',
-                              bin_dir = '/usr/bin',
-                              action = ['create_delayed'],
-                              )
-    self.assertResourceCalled('HdfsDirectory', None,
-                              security_enabled = True,
-                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = '/usr/bin/kinit',
-                              bin_dir = '/usr/bin',
-                              action = ['create'],
-                              )
+    self.assertResourceCalled('HdfsResource', 'hdfs://c6401.ambari.apache.org:8020/apps/hbase/data',
+        security_enabled = True,
+        hadoop_bin_dir = '/usr/bin',
+        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        owner = 'hbase',
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        type = 'directory',
+        action = ['create_delayed'],
+    )
+    self.assertResourceCalled('HdfsResource', '/apps/hbase/staging',
+        security_enabled = True,
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        owner = 'hbase',
+        hadoop_bin_dir = '/usr/bin',
+        type = 'directory',
+        action = ['create_delayed'],
+        mode = 0711,
+    )
+    self.assertResourceCalled('HdfsResource', None,
+        security_enabled = True,
+        hadoop_bin_dir = '/usr/bin',
+        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        action = ['execute'],
+        hadoop_conf_dir = '/etc/hadoop/conf',
+    )
 
   def test_start_default_22(self):
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hbase_master.py",
@@ -505,35 +516,41 @@ class TestHBaseMaster(RMFTestCase):
                               owner='hbase',
                               content='log4jproperties\nline2')
 
-    self.assertResourceCalled('HdfsDirectory', 'hdfs://nn1/apps/hbase/data',
-                              security_enabled = False,
-                              keytab = UnknownConfigurationMock(),
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = "/usr/bin/kinit",
-                              owner = 'hbase',
-                              bin_dir = '/usr/hdp/current/hadoop-client/bin',
-                              action = ['create_delayed'])
-
-    self.assertResourceCalled('HdfsDirectory', '/apps/hbase/staging',
-                              security_enabled = False,
-                              keytab = UnknownConfigurationMock(),
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = "/usr/bin/kinit",
-                              mode = 0711,
-                              owner = 'hbase',
-                              bin_dir = '/usr/hdp/current/hadoop-client/bin',
-                              action = ['create_delayed'])
-
-    self.assertResourceCalled('HdfsDirectory', None,
-                              security_enabled = False,
-                              keytab = UnknownConfigurationMock(),
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = "/usr/bin/kinit",
-                              bin_dir = '/usr/hdp/current/hadoop-client/bin',
-                              action = ['create'])
+    self.assertResourceCalled('HdfsResource', 'hdfs://nn1/apps/hbase/data',
+        security_enabled = False,
+        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+        keytab = UnknownConfigurationMock(),
+        hadoop_fs = 'hdfs://nn1',
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        owner = 'hbase',
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        type = 'directory',
+        action = ['create_delayed'],
+    )
+    self.assertResourceCalled('HdfsResource', '/apps/hbase/staging',
+        security_enabled = False,
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        keytab = UnknownConfigurationMock(),
+        hadoop_fs = 'hdfs://nn1',
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        owner = 'hbase',
+        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+        type = 'directory',
+        action = ['create_delayed'],
+        mode = 0711,
+    )
+    self.assertResourceCalled('HdfsResource', None,
+        security_enabled = False,
+        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+        keytab = UnknownConfigurationMock(),
+        hadoop_fs = 'hdfs://nn1',
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        action = ['execute'],
+        hadoop_conf_dir = '/etc/hadoop/conf',
+    )
 
     self.assertResourceCalled('Execute', '/usr/hdp/current/hbase-master/bin/hbase-daemon.sh --config /etc/hbase/conf start master',
       not_if = 'ls /var/run/hbase/hbase-hbase-master.pid >/dev/null 2>&1 && ps -p `cat /var/run/hbase/hbase-hbase-master.pid` >/dev/null 2>&1',

http://git-wip-us.apache.org/repos/asf/ambari/blob/df9e096f/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_regionserver.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_regionserver.py b/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_regionserver.py
index 10bd583..c27a763 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_regionserver.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_regionserver.py
@@ -198,36 +198,6 @@ class TestHbaseRegionServer(RMFTestCase):
                               owner='hbase',
                               content='log4jproperties\nline2'
     )
-    self.assertResourceCalled('HdfsDirectory', 'hdfs://c6401.ambari.apache.org:8020/apps/hbase/data',
-                              security_enabled = False,
-                              keytab = UnknownConfigurationMock(),
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = '/usr/bin/kinit',
-                              owner = 'hbase',
-                              bin_dir = '/usr/bin',
-                              action = ['create_delayed'],
-                              )
-    self.assertResourceCalled('HdfsDirectory', '/apps/hbase/staging',
-                              security_enabled = False,
-                              keytab = UnknownConfigurationMock(),
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = '/usr/bin/kinit',
-                              mode = 0711,
-                              owner = 'hbase',
-                              bin_dir = '/usr/bin',
-                              action = ['create_delayed'],
-                              )
-    self.assertResourceCalled('HdfsDirectory', None,
-                              security_enabled = False,
-                              keytab = UnknownConfigurationMock(),
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = '/usr/bin/kinit',
-                              bin_dir = '/usr/bin',
-                              action = ['create'],
-                              )
 
   def assert_configure_secured(self):
     self.assertResourceCalled('Directory', '/etc/hbase',
@@ -312,36 +282,6 @@ class TestHbaseRegionServer(RMFTestCase):
                               owner='hbase',
                               content='log4jproperties\nline2'
     )
-    self.assertResourceCalled('HdfsDirectory', 'hdfs://c6401.ambari.apache.org:8020/apps/hbase/data',
-                              security_enabled = True,
-                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = '/usr/bin/kinit',
-                              owner = 'hbase',
-                              bin_dir = '/usr/bin',
-                              action = ['create_delayed'],
-                              )
-    self.assertResourceCalled('HdfsDirectory', '/apps/hbase/staging',
-                              security_enabled = True,
-                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = '/usr/bin/kinit',
-                              mode = 0711,
-                              owner = 'hbase',
-                              bin_dir = '/usr/bin',
-                              action = ['create_delayed'],
-                              )
-    self.assertResourceCalled('HdfsDirectory', None,
-                              security_enabled = True,
-                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = '/usr/bin/kinit',
-                              bin_dir = '/usr/bin',
-                              action = ['create'],
-                              )
 
   def test_start_default_22(self):
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hbase_regionserver.py",
@@ -432,36 +372,6 @@ class TestHbaseRegionServer(RMFTestCase):
                               owner='hbase',
                               content='log4jproperties\nline2')
 
-    self.assertResourceCalled('HdfsDirectory', 'hdfs://nn1/apps/hbase/data',
-                              security_enabled = False,
-                              keytab = UnknownConfigurationMock(),
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = "/usr/bin/kinit",
-                              owner = 'hbase',
-                              bin_dir = '/usr/hdp/current/hadoop-client/bin',
-                              action = ['create_delayed'])
-
-    self.assertResourceCalled('HdfsDirectory', '/apps/hbase/staging',
-                              security_enabled = False,
-                              keytab = UnknownConfigurationMock(),
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = "/usr/bin/kinit",
-                              mode = 0711,
-                              owner = 'hbase',
-                              bin_dir = '/usr/hdp/current/hadoop-client/bin',
-                              action = ['create_delayed'])
-
-    self.assertResourceCalled('HdfsDirectory', None,
-                              security_enabled = False,
-                              keytab = UnknownConfigurationMock(),
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = "/usr/bin/kinit",
-                              bin_dir = '/usr/hdp/current/hadoop-client/bin',
-                              action = ['create'])
-
     self.assertResourceCalled('Execute', '/usr/hdp/current/hbase-regionserver/bin/hbase-daemon.sh --config /etc/hbase/conf start regionserver',
       not_if = 'ls /var/run/hbase/hbase-hbase-regionserver.pid >/dev/null 2>&1 && ps -p `cat /var/run/hbase/hbase-hbase-regionserver.pid` >/dev/null 2>&1',
       user = 'hbase')

http://git-wip-us.apache.org/repos/asf/ambari/blob/df9e096f/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py
index 2fc8549..8ceeddf 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py
@@ -379,6 +379,9 @@ class TestDatanode(RMFTestCase):
                               content = Template('slaves.j2'),
                               owner = 'hdfs',
                               )
+    self.assertResourceCalled('File', '/var/lib/ambari-agent/lib/fast-hdfs-resource.jar',
+        content = StaticFile('fast-hdfs-resource.jar'),
+    )
     self.assertResourceCalled('Directory', '/var/lib/hadoop-hdfs',
                               owner = 'hdfs',
                               group = 'hadoop',
@@ -425,6 +428,9 @@ class TestDatanode(RMFTestCase):
                               content = Template('slaves.j2'),
                               owner = 'root',
                               )
+    self.assertResourceCalled('File', '/var/lib/ambari-agent/lib/fast-hdfs-resource.jar',
+        content = StaticFile('fast-hdfs-resource.jar'),
+    )
     self.assertResourceCalled('Directory', '/var/lib/hadoop-hdfs',
                               owner = 'hdfs',
                               group = 'hadoop',

http://git-wip-us.apache.org/repos/asf/ambari/blob/df9e096f/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_journalnode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_journalnode.py b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_journalnode.py
index a99748f..7c774b4 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_journalnode.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_journalnode.py
@@ -206,6 +206,9 @@ class TestJournalnode(RMFTestCase):
                               content = Template('slaves.j2'),
                               owner = 'hdfs',
                               )
+    self.assertResourceCalled('File', '/var/lib/ambari-agent/lib/fast-hdfs-resource.jar',
+        content = StaticFile('fast-hdfs-resource.jar'),
+    )
 
   def assert_configure_secured(self):
     self.assertResourceCalled('Directory', '/grid/0/hdfs/journal',
@@ -244,3 +247,6 @@ class TestJournalnode(RMFTestCase):
                               content = Template('slaves.j2'),
                               owner = 'root',
                               )
+    self.assertResourceCalled('File', '/var/lib/ambari-agent/lib/fast-hdfs-resource.jar',
+        content = StaticFile('fast-hdfs-resource.jar'),
+    )

http://git-wip-us.apache.org/repos/asf/ambari/blob/df9e096f/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py
index 3981e33..99cd9eb 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py
@@ -88,7 +88,6 @@ class TestNamenode(RMFTestCase):
         environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
         not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid` >/dev/null 2>&1',
     )
-    self.printResources()
     self.assertResourceCalled('Execute', 'hdfs --config /etc/hadoop/conf dfsadmin -safemode leave',
         path = ['/usr/bin'],
         user = 'hdfs',
@@ -100,38 +99,42 @@ class TestNamenode(RMFTestCase):
         user = 'hdfs',
         try_sleep = 10,
     )
-    self.assertResourceCalled('HdfsDirectory', '/tmp',
-                              security_enabled = False,
-                              keytab = UnknownConfigurationMock(),
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = '/usr/bin/kinit',
-                              mode = 0777,
-                              owner = 'hdfs',
-                              bin_dir = '/usr/bin',
-                              action = ['create_delayed'],
-                              )
-    self.assertResourceCalled('HdfsDirectory', '/user/ambari-qa',
-                              security_enabled = False,
-                              keytab = UnknownConfigurationMock(),
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = '/usr/bin/kinit',
-                              mode = 0770,
-                              owner = 'ambari-qa',
-                              bin_dir = '/usr/bin',
-                              action = ['create_delayed'],
-                              )
-    self.assertResourceCalled('HdfsDirectory', None,
-                              security_enabled = False,
-                              keytab = UnknownConfigurationMock(),
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = '/usr/bin/kinit',
-                              action = ['create'],
-                              bin_dir = '/usr/bin',
-                              only_if = None,
-                              )
+    self.assertResourceCalled('HdfsResource', '/tmp',
+        security_enabled = False,
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        keytab = UnknownConfigurationMock(),
+        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        owner = 'hdfs',
+        hadoop_bin_dir = '/usr/bin',
+        type = 'directory',
+        action = ['create_delayed'],
+        mode = 0777,
+    )
+    self.assertResourceCalled('HdfsResource', '/user/ambari-qa',
+        security_enabled = False,
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        keytab = UnknownConfigurationMock(),
+        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        owner = 'ambari-qa',
+        hadoop_bin_dir = '/usr/bin',
+        type = 'directory',
+        action = ['create_delayed'],
+        mode = 0770,
+    )
+    self.assertResourceCalled('HdfsResource', None,
+        security_enabled = False,
+        hadoop_bin_dir = '/usr/bin',
+        keytab = UnknownConfigurationMock(),
+        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        action = ['execute'],
+        hadoop_conf_dir = '/etc/hadoop/conf',
+    )
     self.assertNoMoreResources()
 
   def test_stop_default(self):
@@ -226,38 +229,42 @@ class TestNamenode(RMFTestCase):
         user = 'hdfs',
         try_sleep = 10,
     )
-    self.assertResourceCalled('HdfsDirectory', '/tmp',
-                              security_enabled = True,
-                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = '/usr/bin/kinit',
-                              mode = 0777,
-                              owner = 'hdfs',
-                              bin_dir = '/usr/bin',
-                              action = ['create_delayed'],
-                              )
-    self.assertResourceCalled('HdfsDirectory', '/user/ambari-qa',
-                              security_enabled = True,
-                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = '/usr/bin/kinit',
-                              mode = 0770,
-                              owner = 'ambari-qa',
-                              bin_dir = '/usr/bin',
-                              action = ['create_delayed'],
-                              )
-    self.assertResourceCalled('HdfsDirectory', None,
-                              security_enabled = True,
-                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = '/usr/bin/kinit',
-                              action = ['create'],
-                              bin_dir = '/usr/bin',
-                              only_if = None,
-                              )
+    self.assertResourceCalled('HdfsResource', '/tmp',
+        security_enabled = True,
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        owner = 'hdfs',
+        hadoop_bin_dir = '/usr/bin',
+        type = 'directory',
+        action = ['create_delayed'],
+        mode = 0777,
+    )
+    self.assertResourceCalled('HdfsResource', '/user/ambari-qa',
+        security_enabled = True,
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        owner = 'ambari-qa',
+        hadoop_bin_dir = '/usr/bin',
+        type = 'directory',
+        action = ['create_delayed'],
+        mode = 0770,
+    )
+    self.assertResourceCalled('HdfsResource', None,
+        security_enabled = True,
+        hadoop_bin_dir = '/usr/bin',
+        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        action = ['execute'],
+        hadoop_conf_dir = '/etc/hadoop/conf',
+    )
     self.assertNoMoreResources()
 
   def test_stop_secured(self):
@@ -323,37 +330,41 @@ class TestNamenode(RMFTestCase):
         user = 'hdfs',
         try_sleep = 10,
     )
-    self.assertResourceCalled('HdfsDirectory', '/tmp',
-                              security_enabled = False,
-                              keytab = UnknownConfigurationMock(),
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = '/usr/bin/kinit',
-                              mode = 0777,
-                              owner = 'hdfs',
-                              bin_dir = '/usr/bin',
-                              action = ['create_delayed'],
-                              )
-    self.assertResourceCalled('HdfsDirectory', '/user/ambari-qa',
-                              security_enabled = False,
-                              keytab = UnknownConfigurationMock(),
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = '/usr/bin/kinit',
-                              mode = 0770,
-                              owner = 'ambari-qa',
-                              bin_dir = '/usr/bin',
-                              action = ['create_delayed'],
-                              )
-    self.assertResourceCalled('HdfsDirectory', None,
+    self.assertResourceCalled('HdfsResource', '/tmp',
         security_enabled = False,
+        hadoop_conf_dir = '/etc/hadoop/conf',
         keytab = UnknownConfigurationMock(),
-        conf_dir = '/etc/hadoop/conf',
-        hdfs_user = 'hdfs',
+        hadoop_fs = 'hdfs://ns1',
         kinit_path_local = '/usr/bin/kinit',
-        action = ['create'],
-        bin_dir = '/usr/bin',
-        only_if = "/usr/bin/sudo su hdfs -l -s /bin/bash -c 'export  PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn1 | grep active'",
+        user = 'hdfs',
+        owner = 'hdfs',
+        hadoop_bin_dir = '/usr/bin',
+        type = 'directory',
+        action = ['create_delayed'],
+        mode = 0777,
+    )
+    self.assertResourceCalled('HdfsResource', '/user/ambari-qa',
+        security_enabled = False,
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        keytab = UnknownConfigurationMock(),
+        hadoop_fs = 'hdfs://ns1',
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        owner = 'ambari-qa',
+        hadoop_bin_dir = '/usr/bin',
+        type = 'directory',
+        action = ['create_delayed'],
+        mode = 0770,
+    )
+    self.assertResourceCalled('HdfsResource', None,
+        security_enabled = False,
+        hadoop_bin_dir = '/usr/bin',
+        keytab = UnknownConfigurationMock(),
+        hadoop_fs = 'hdfs://ns1',
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        action = ['execute'],
+        hadoop_conf_dir = '/etc/hadoop/conf',
     )
     self.assertNoMoreResources()
 
@@ -402,37 +413,42 @@ class TestNamenode(RMFTestCase):
         user = 'hdfs',
         try_sleep = 10,
     )
-    self.assertResourceCalled('HdfsDirectory', '/tmp',
-                              security_enabled = True,
-                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = '/usr/bin/kinit',
-                              mode = 0777,
-                              owner = 'hdfs',
-                              bin_dir = '/usr/bin',
-                              action = ['create_delayed'],
-                              )
-    self.assertResourceCalled('HdfsDirectory', '/user/ambari-qa',
-                              security_enabled = True,
-                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = '/usr/bin/kinit',
-                              mode = 0770,
-                              owner = 'ambari-qa',
-                              bin_dir = '/usr/bin',
-                              action = ['create_delayed'],
-                              )
-    self.assertResourceCalled('HdfsDirectory', None,
+
+    self.assertResourceCalled('HdfsResource', '/tmp',
         security_enabled = True,
+        hadoop_conf_dir = '/etc/hadoop/conf',
         keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-        conf_dir = '/etc/hadoop/conf',
-        hdfs_user = 'hdfs',
+        hadoop_fs = 'hdfs://ns1',
         kinit_path_local = '/usr/bin/kinit',
-        action = ['create'],
-        bin_dir = '/usr/bin',
-        only_if = "/usr/bin/sudo su hdfs -l -s /bin/bash -c 'export  PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn1 | grep active'",
+        user = 'hdfs',
+        owner = 'hdfs',
+        hadoop_bin_dir = '/usr/bin',
+        type = 'directory',
+        action = ['create_delayed'],
+        mode = 0777,
+    )
+    self.assertResourceCalled('HdfsResource', '/user/ambari-qa',
+        security_enabled = True,
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+        hadoop_fs = 'hdfs://ns1',
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        owner = 'ambari-qa',
+        hadoop_bin_dir = '/usr/bin',
+        type = 'directory',
+        action = ['create_delayed'],
+        mode = 0770,
+    )
+    self.assertResourceCalled('HdfsResource', None,
+        security_enabled = True,
+        hadoop_bin_dir = '/usr/bin',
+        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+        hadoop_fs = 'hdfs://ns1',
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        action = ['execute'],
+        hadoop_conf_dir = '/etc/hadoop/conf',
     )
     self.assertNoMoreResources()
 
@@ -494,38 +510,42 @@ class TestNamenode(RMFTestCase):
                               user = 'hdfs',
                               try_sleep = 10,
                               )
-    self.assertResourceCalled('HdfsDirectory', '/tmp',
-                              security_enabled = False,
-                              keytab = UnknownConfigurationMock(),
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = '/usr/bin/kinit',
-                              mode = 0777,
-                              owner = 'hdfs',
-                              bin_dir = '/usr/bin',
-                              action = ['create_delayed'],
-                              )
-    self.assertResourceCalled('HdfsDirectory', '/user/ambari-qa',
-                              security_enabled = False,
-                              keytab = UnknownConfigurationMock(),
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = '/usr/bin/kinit',
-                              mode = 0770,
-                              owner = 'ambari-qa',
-                              bin_dir = '/usr/bin',
-                              action = ['create_delayed'],
-                              )
-    self.assertResourceCalled('HdfsDirectory', None,
-                              security_enabled = False,
-                              keytab = UnknownConfigurationMock(),
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = '/usr/bin/kinit',
-                              action = ['create'],
-                              bin_dir = '/usr/bin',
-                              only_if = "/usr/bin/sudo su hdfs -l -s /bin/bash -c 'export  PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn1 | grep active'",
-                              )
+    self.assertResourceCalled('HdfsResource', '/tmp',
+        security_enabled = False,
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        keytab = UnknownConfigurationMock(),
+        hadoop_fs = 'hdfs://ns1',
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        owner = 'hdfs',
+        hadoop_bin_dir = '/usr/bin',
+        type = 'directory',
+        action = ['create_delayed'],
+        mode = 0777,
+    )
+    self.assertResourceCalled('HdfsResource', '/user/ambari-qa',
+        security_enabled = False,
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        keytab = UnknownConfigurationMock(),
+        hadoop_fs = 'hdfs://ns1',
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        owner = 'ambari-qa',
+        hadoop_bin_dir = '/usr/bin',
+        type = 'directory',
+        action = ['create_delayed'],
+        mode = 0770,
+    )
+    self.assertResourceCalled('HdfsResource', None,
+        security_enabled = False,
+        hadoop_bin_dir = '/usr/bin',
+        keytab = UnknownConfigurationMock(),
+        hadoop_fs = 'hdfs://ns1',
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        action = ['execute'],
+        hadoop_conf_dir = '/etc/hadoop/conf',
+    )
     self.assertNoMoreResources()
 
   # tests namenode start command when NameNode HA is enabled, and
@@ -574,44 +594,48 @@ class TestNamenode(RMFTestCase):
                               not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid` >/dev/null 2>&1',
                               )
     self.assertResourceCalled('Execute', "hadoop dfsadmin -safemode get | grep 'Safe mode is OFF'",
-                              path = ['/usr/bin'],
-                              tries = 40,
-                              only_if = "/usr/bin/sudo su hdfs -l -s /bin/bash -c 'export  PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn2 | grep active'",
-                              user = 'hdfs',
-                              try_sleep = 10,
-                              )
-    self.assertResourceCalled('HdfsDirectory', '/tmp',
-                              security_enabled = False,
-                              keytab = UnknownConfigurationMock(),
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = '/usr/bin/kinit',
-                              mode = 0777,
-                              owner = 'hdfs',
-                              bin_dir = '/usr/bin',
-                              action = ['create_delayed'],
-                              )
-    self.assertResourceCalled('HdfsDirectory', '/user/ambari-qa',
-                              security_enabled = False,
-                              keytab = UnknownConfigurationMock(),
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = '/usr/bin/kinit',
-                              mode = 0770,
-                              owner = 'ambari-qa',
-                              bin_dir = '/usr/bin',
-                              action = ['create_delayed'],
-                              )
-    self.assertResourceCalled('HdfsDirectory', None,
-                              security_enabled = False,
-                              keytab = UnknownConfigurationMock(),
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = '/usr/bin/kinit',
-                              action = ['create'],
-                              bin_dir = '/usr/bin',
-                              only_if = "/usr/bin/sudo su hdfs -l -s /bin/bash -c 'export  PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn2 | grep active'",
-                              )
+        path = ['/usr/bin'],
+        tries = 40,
+        only_if = "/usr/bin/sudo su hdfs -l -s /bin/bash -c 'export  PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn2 | grep active'",
+        user = 'hdfs',
+        try_sleep = 10,
+    )
+    self.assertResourceCalled('HdfsResource', '/tmp',
+        security_enabled = False,
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        keytab = UnknownConfigurationMock(),
+        hadoop_fs = 'hdfs://ns1',
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        owner = 'hdfs',
+        hadoop_bin_dir = '/usr/bin',
+        type = 'directory',
+        action = ['create_delayed'],
+        mode = 0777,
+    )
+    self.assertResourceCalled('HdfsResource', '/user/ambari-qa',
+        security_enabled = False,
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        keytab = UnknownConfigurationMock(),
+        hadoop_fs = 'hdfs://ns1',
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        owner = 'ambari-qa',
+        hadoop_bin_dir = '/usr/bin',
+        type = 'directory',
+        action = ['create_delayed'],
+        mode = 0770,
+    )
+    self.assertResourceCalled('HdfsResource', None,
+        security_enabled = False,
+        hadoop_bin_dir = '/usr/bin',
+        keytab = UnknownConfigurationMock(),
+        hadoop_fs = 'hdfs://ns1',
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        action = ['execute'],
+        hadoop_conf_dir = '/etc/hadoop/conf',
+    )
     self.assertNoMoreResources()
 
   def test_decommission_default(self):
@@ -728,6 +752,9 @@ class TestNamenode(RMFTestCase):
                               content = Template('slaves.j2'),
                               owner = 'hdfs',
                               )
+    self.assertResourceCalled('File', '/var/lib/ambari-agent/lib/fast-hdfs-resource.jar',
+        content = StaticFile('fast-hdfs-resource.jar'),
+    )
     self.assertResourceCalled('Directory', '/hadoop/hdfs/namenode',
                               owner = 'hdfs',
                               group = 'hadoop',
@@ -767,6 +794,9 @@ class TestNamenode(RMFTestCase):
                               content = Template('slaves.j2'),
                               owner = 'root',
                               )
+    self.assertResourceCalled('File', '/var/lib/ambari-agent/lib/fast-hdfs-resource.jar',
+        content = StaticFile('fast-hdfs-resource.jar'),
+    )
     self.assertResourceCalled('Directory', '/hadoop/hdfs/namenode',
                               owner = 'hdfs',
                               group = 'hadoop',

http://git-wip-us.apache.org/repos/asf/ambari/blob/df9e096f/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_service_check.py b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_service_check.py
index e48f7f6..9d90456 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_service_check.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_service_check.py
@@ -59,36 +59,48 @@ class TestServiceCheck(RMFTestCase):
         bin_dir = '/usr/bin',
         user = 'ambari-qa',
     )
-    self.assertResourceCalled('ExecuteHadoop', 'fs -mkdir /tmp',
-        conf_dir = '/etc/hadoop/conf',
-        logoutput = True,
-        not_if = "/usr/bin/sudo su ambari-qa -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]/usr/bin/hadoop --config /etc/hadoop/conf fs -test -e /tmp'",
-        try_sleep = 3,
-        tries = 5,
-        bin_dir = '/usr/bin',
-        user = 'ambari-qa',
+    self.assertResourceCalled('HdfsResource', '/tmp',
+        security_enabled = False,
+        hadoop_bin_dir = '/usr/bin',
+        keytab = UnknownConfigurationMock(),
+        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        action = ['create_delayed'],
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        type = 'directory',
+        mode = 0777,
     )
-    self.assertResourceCalled('ExecuteHadoop', 'fs -chmod 777 /tmp',
-        conf_dir = '/etc/hadoop/conf',
-        logoutput = True,
-        try_sleep = 3,
-        tries = 5,
-        bin_dir = '/usr/bin',
-        user = 'ambari-qa',
+    self.assertResourceCalled('HdfsResource', '/tmp/',
+        security_enabled = False,
+        hadoop_bin_dir = '/usr/bin',
+        keytab = UnknownConfigurationMock(),
+        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        action = ['delete_delayed'],
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        type = 'directory',
     )
-    self.assertResourceCalled('ExecuteHadoop', 'fs -rm /tmp/; hadoop --config /etc/hadoop/conf fs -put /etc/passwd /tmp/',
-        logoutput = True,
-        tries = 5,
-        conf_dir = '/etc/hadoop/conf',
-        bin_dir = '/usr/bin',
-        try_sleep = 3,
-        user = 'ambari-qa',
+    self.assertResourceCalled('HdfsResource', '/tmp/',
+        security_enabled = False,
+        hadoop_bin_dir = '/usr/bin',
+        keytab = UnknownConfigurationMock(),
+        kinit_path_local = '/usr/bin/kinit',
+        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
+        source = '/etc/passwd',
+        user = 'hdfs',
+        action = ['create_delayed'],
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        type = 'directory',
     )
-    self.assertResourceCalled('ExecuteHadoop', 'fs -test -e /tmp/',
-        logoutput = True,
-        tries = 5,
-        conf_dir = '/etc/hadoop/conf',
-        bin_dir = '/usr/bin',
-        try_sleep = 3,
-        user = 'ambari-qa',
+    self.assertResourceCalled('HdfsResource', None,
+        security_enabled = False,
+        hadoop_bin_dir = '/usr/bin',
+        keytab = UnknownConfigurationMock(),
+        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        action = ['execute'],
+        hadoop_conf_dir = '/etc/hadoop/conf',
     )

http://git-wip-us.apache.org/repos/asf/ambari/blob/df9e096f/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_snamenode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_snamenode.py b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_snamenode.py
index 7ba1924..e2996fd 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_snamenode.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_snamenode.py
@@ -230,6 +230,9 @@ class TestSNamenode(RMFTestCase):
                               content = Template('slaves.j2'),
                               owner = 'hdfs',
                               )
+    self.assertResourceCalled('File', '/var/lib/ambari-agent/lib/fast-hdfs-resource.jar',
+        content = StaticFile('fast-hdfs-resource.jar'),
+    )
     self.assertResourceCalled('Directory', '/hadoop/hdfs/namesecondary',
                               owner = 'hdfs',
                               group = 'hadoop',
@@ -269,6 +272,9 @@ class TestSNamenode(RMFTestCase):
                               content = Template('slaves.j2'),
                               owner = 'root',
                               )
+    self.assertResourceCalled('File', '/var/lib/ambari-agent/lib/fast-hdfs-resource.jar',
+        content = StaticFile('fast-hdfs-resource.jar'),
+    )
     self.assertResourceCalled('Directory', '/hadoop/hdfs/namesecondary',
                               owner = 'hdfs',
                               group = 'hadoop',

http://git-wip-us.apache.org/repos/asf/ambari/blob/df9e096f/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_zkfc.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_zkfc.py b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_zkfc.py
index 9fe9d03..caac26b 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_zkfc.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_zkfc.py
@@ -63,6 +63,9 @@ class TestZkfc(RMFTestCase):
                               content = Template('slaves.j2'),
                               owner = 'hdfs',
                               )
+    self.assertResourceCalled('File', '/var/lib/ambari-agent/lib/fast-hdfs-resource.jar',
+        content = StaticFile('fast-hdfs-resource.jar'),
+    )
     self.assertResourceCalled('Directory', '/var/run/hadoop',
                               owner = 'hdfs',
                               group = 'hadoop',
@@ -154,6 +157,9 @@ class TestZkfc(RMFTestCase):
                               content = Template('slaves.j2'),
                               owner = 'root',
                               )
+    self.assertResourceCalled('File', '/var/lib/ambari-agent/lib/fast-hdfs-resource.jar',
+        content = StaticFile('fast-hdfs-resource.jar'),
+    )
     self.assertResourceCalled('Directory', '/var/run/hadoop',
                               owner = 'hdfs',
                               group = 'hadoop',
@@ -245,6 +251,9 @@ class TestZkfc(RMFTestCase):
                               content = Template('slaves.j2'),
                               owner = 'hdfs',
                               )
+    self.assertResourceCalled('File', '/var/lib/ambari-agent/lib/fast-hdfs-resource.jar',
+        content = StaticFile('fast-hdfs-resource.jar'),
+    )
     self.assertResourceCalled('Directory', '/var/run/hadoop',
                               owner = 'hdfs',
                               group = 'hadoop',
@@ -311,6 +320,9 @@ class TestZkfc(RMFTestCase):
                               content = Template('slaves.j2'),
                               owner = 'hdfs',
                               )
+    self.assertResourceCalled('File', '/var/lib/ambari-agent/lib/fast-hdfs-resource.jar',
+        content = StaticFile('fast-hdfs-resource.jar'),
+    )
     self.assertResourceCalled('Directory', '/var/run/hadoop',
                               owner = 'hdfs',
                               group = 'hadoop',


[3/3] ambari git commit: AMBARI-8932. Creating hdfs directories on deploy takes too long, Part 2, reduces deploy time by ~6min (aonishuk)

Posted by ao...@apache.org.
AMBARI-8932. Creating hdfs directories on deploy takes too long, Part 2, reduces deploy time by ~6min (aonishuk)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/df9e096f
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/df9e096f
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/df9e096f

Branch: refs/heads/trunk
Commit: df9e096f1731d1b0fb3c53e5f306101191f8195c
Parents: b7dc5dc
Author: Andrew Onishuk <ao...@hortonworks.com>
Authored: Mon Dec 29 15:39:48 2014 +0200
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Mon Dec 29 15:39:48 2014 +0200

----------------------------------------------------------------------
 ambari-agent/pom.xml                            |   7 +
 .../resource_management/TestContentSources.py   |   6 +-
 .../resource_management/TestCopyFromLocal.py    |  68 ---
 .../TestPropertiesFileResource.py               |  10 +-
 .../TestRepositoryResource.py                   |   4 +-
 .../TestXmlConfigResource.py                    |  10 +-
 .../python/resource_management/core/source.py   |   2 +-
 .../libraries/functions/__init__.py             |   1 +
 .../libraries/functions/get_namenode_states.py  |  50 +++
 .../libraries/functions/version.py              |  29 +-
 .../libraries/providers/__init__.py             |   3 +-
 .../libraries/providers/copy_from_local.py      |  89 ----
 .../libraries/providers/hdfs_directory.py       | 112 -----
 .../libraries/providers/hdfs_resource.py        | 109 +++++
 .../libraries/resources/__init__.py             |   3 +-
 .../libraries/resources/copy_from_local.py      |  40 --
 .../libraries/resources/hdfs_directory.py       |  45 --
 .../libraries/resources/hdfs_resource.py        |  77 ++++
 .../FALCON/0.5.0.2.1/package/scripts/falcon.py  |   8 +-
 .../FALCON/0.5.0.2.1/package/scripts/params.py  |  16 +-
 .../HBASE/0.96.0.2.0/package/scripts/hbase.py   |  10 +-
 .../HBASE/0.96.0.2.0/package/scripts/params.py  |  16 +-
 .../0.96.0.2.0/package/scripts/service_check.py |   3 +-
 .../package/files/fast-hdfs-resource.jar        | Bin 0 -> 19284916 bytes
 .../HDFS/2.1.0.2.0/package/scripts/hdfs.py      |   5 +
 .../2.1.0.2.0/package/scripts/hdfs_namenode.py  |  11 +-
 .../2.1.0.2.0/package/scripts/hdfs_snamenode.py |   2 -
 .../HDFS/2.1.0.2.0/package/scripts/params.py    |  21 +-
 .../2.1.0.2.0/package/scripts/service_check.py  |  54 +--
 .../0.12.0.2.0/package/files/templetonSmoke.sh  |  30 +-
 .../HIVE/0.12.0.2.0/package/scripts/hive.py     |  86 +++-
 .../0.12.0.2.0/package/scripts/hive_server.py   |   9 +-
 .../0.12.0.2.0/package/scripts/install_jars.py  |  55 +--
 .../HIVE/0.12.0.2.0/package/scripts/params.py   |  66 ++-
 .../HIVE/0.12.0.2.0/package/scripts/webhcat.py  |  73 +---
 .../package/scripts/webhcat_server.py           |   2 +-
 .../4.0.0.2.0/package/files/oozieSmoke2.sh      |  33 +-
 .../OOZIE/4.0.0.2.0/package/scripts/oozie.py    |   6 +-
 .../OOZIE/4.0.0.2.0/package/scripts/params.py   |  15 +-
 .../PIG/0.12.0.2.0/package/scripts/params.py    |  20 +-
 .../0.12.0.2.0/package/scripts/service_check.py |  75 ++--
 .../TEZ/0.4.0.2.1/package/scripts/params.py     |  16 +-
 .../YARN/package/scripts/historyserver.py       |  24 +-
 .../services/YARN/package/scripts/params.py     |  28 +-
 .../2.0.6/services/YARN/package/scripts/yarn.py |  27 +-
 .../stacks/2.0.6/HBASE/test_hbase_master.py     | 195 +++++----
 .../2.0.6/HBASE/test_hbase_regionserver.py      |  90 ----
 .../python/stacks/2.0.6/HDFS/test_datanode.py   |   6 +
 .../stacks/2.0.6/HDFS/test_journalnode.py       |   6 +
 .../python/stacks/2.0.6/HDFS/test_namenode.py   | 412 ++++++++++---------
 .../stacks/2.0.6/HDFS/test_service_check.py     |  70 ++--
 .../python/stacks/2.0.6/HDFS/test_snamenode.py  |   6 +
 .../test/python/stacks/2.0.6/HDFS/test_zkfc.py  |  12 +
 .../stacks/2.0.6/HIVE/test_hive_server.py       | 272 ++++++++----
 .../stacks/2.0.6/HIVE/test_webhcat_server.py    | 134 ------
 .../stacks/2.0.6/OOZIE/test_oozie_server.py     |  66 ++-
 .../stacks/2.0.6/PIG/test_pig_service_check.py  | 112 ++++-
 .../stacks/2.0.6/YARN/test_historyserver.py     | 288 +++++++------
 .../stacks/2.0.6/YARN/test_mapreduce2_client.py |   2 +
 .../stacks/2.0.6/YARN/test_nodemanager.py       | 132 ------
 .../stacks/2.0.6/YARN/test_yarn_client.py       |   2 +
 .../python/stacks/2.0.6/configs/default.json    |   4 +-
 .../stacks/2.1/FALCON/test_falcon_server.py     |  43 +-
 ambari-server/src/test/python/unitTests.py      |   3 +-
 .../dependency-reduced-pom.xml                  |  42 ++
 .../ambari/fast_hdfs_resource/Resource.java     |   8 +-
 66 files changed, 1668 insertions(+), 1613 deletions(-)
----------------------------------------------------------------------
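For reviewers skimming the diff: the recurring change in the test files
below is the replacement of per-directory HdfsDirectory/ExecuteHadoop
calls with the new batched HdfsResource. A minimal sketch of the pattern,
using the parameter names visible in the updated asserts (kerberos and
filesystem parameters omitted for brevity; values illustrative):

    # Queue operations; nothing touches HDFS yet.
    HdfsResource('/tmp',
                 type = 'directory',
                 action = ['create_delayed'],  # recorded for later batch execution
                 owner = 'hdfs',
                 mode = 0777,
    )
    HdfsResource('/user/ambari-qa',
                 type = 'directory',
                 action = ['create_delayed'],
                 owner = 'ambari-qa',
                 mode = 0770,
    )
    # Flush the queue: fast-hdfs-resource.jar applies every pending
    # operation in a single JVM invocation instead of spawning one
    # hadoop fs process per directory, which is where the ~6min
    # deploy-time saving comes from.
    HdfsResource(None,
                 action = ['execute'],
    )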


http://git-wip-us.apache.org/repos/asf/ambari/blob/df9e096f/ambari-agent/pom.xml
----------------------------------------------------------------------
diff --git a/ambari-agent/pom.xml b/ambari-agent/pom.xml
index 939c366..bee88f8 100644
--- a/ambari-agent/pom.xml
+++ b/ambari-agent/pom.xml
@@ -402,6 +402,12 @@
                 </source>
               </sources>
             </mapping>
+            <mapping>
+              <directory>/var/lib/ambari-agent/lib</directory>
+              <filemode>755</filemode>
+              <username>root</username>
+              <groupname>root</groupname>
+            </mapping>
           </mappings>
         </configuration>
       </plugin>
@@ -516,6 +522,7 @@
                 <path>/var/lib/${project.artifactId}/data/tmp</path>
                 <path>/var/lib/${project.artifactId}/keys</path>
                 <path>${package.log.dir}</path>
+                <path>/var/lib/${project.artifactId}/lib</path>
               </paths>
               <mapper>
                 <type>perm</type>

http://git-wip-us.apache.org/repos/asf/ambari/blob/df9e096f/ambari-agent/src/test/python/resource_management/TestContentSources.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/python/resource_management/TestContentSources.py b/ambari-agent/src/test/python/resource_management/TestContentSources.py
index 651ff83..d3a9b53 100644
--- a/ambari-agent/src/test/python/resource_management/TestContentSources.py
+++ b/ambari-agent/src/test/python/resource_management/TestContentSources.py
@@ -310,7 +310,7 @@ class TestContentSources(TestCase):
       content = template.get_content()
     self.assertEqual(open_mock.call_count, 1)
 
-    self.assertEqual(u'test template content\n', content)
+    self.assertEqual(u'test template content', content)
     open_mock.assert_called_with('/absolute/path/test.j2', 'rb')
     self.assertEqual(getmtime_mock.call_count, 1)
     getmtime_mock.assert_called_with('/absolute/path/test.j2')
@@ -323,7 +323,7 @@ class TestContentSources(TestCase):
       template = InlineTemplate("{{test_arg1}} template content", [], test_arg1 = "test")
       content = template.get_content()
 
-    self.assertEqual(u'test template content\n', content)
+    self.assertEqual(u'test template content', content)
 
   def test_template_imports(self):
     """
@@ -339,4 +339,4 @@ class TestContentSources(TestCase):
     with Environment("/base") as env:
       template = InlineTemplate("{{test_arg1}} template content {{os.path.join(path[0],path[1])}}", [os], test_arg1 = "test", path = ["/one","two"])
       content = template.get_content()
-    self.assertEqual(u'test template content /one/two\n', content)
+    self.assertEqual(u'test template content /one/two', content)

http://git-wip-us.apache.org/repos/asf/ambari/blob/df9e096f/ambari-agent/src/test/python/resource_management/TestCopyFromLocal.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/python/resource_management/TestCopyFromLocal.py b/ambari-agent/src/test/python/resource_management/TestCopyFromLocal.py
deleted file mode 100644
index cabfab5..0000000
--- a/ambari-agent/src/test/python/resource_management/TestCopyFromLocal.py
+++ /dev/null
@@ -1,68 +0,0 @@
-'''
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-   http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-'''
-
-from unittest import TestCase
-from mock.mock import patch, MagicMock
-from resource_management import *
-from resource_management.core import shell
-
-@patch.object(shell, "call", new = MagicMock(return_value=(1, "")))
-@patch.object(System, "os_family", new = 'redhat')
-class TestCopyFromLocal(TestCase):
-
-  @patch("resource_management.libraries.providers.execute_hadoop.ExecuteHadoopProvider")
-  def test_run_default_args(self, execute_hadoop_mock):
-    with Environment() as env:
-      CopyFromLocal('/user/testdir/*.files',
-        owner='user1',
-        dest_dir='/apps/test/',
-        kinnit_if_needed='',
-        hdfs_user='hdfs'
-      )
-      self.assertEqual(execute_hadoop_mock.call_count, 2)
-      call_arg_list = execute_hadoop_mock.call_args_list
-      self.assertEqual('fs -copyFromLocal /user/testdir/*.files /apps/test/',
-                       call_arg_list[0][0][0].command)
-      print call_arg_list[0][0][0].arguments
-      self.assertEquals({'not_if': "/usr/bin/sudo su user1 -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]PATH=$PATH:/usr/bin hadoop fs -ls /apps/test//*.files'", 'bin_dir': '/usr/bin', 'user': 'user1', 'conf_dir': '/etc/hadoop/conf'},
-                        call_arg_list[0][0][0].arguments)
-      self.assertEquals('fs -chown user1 /apps/test//*.files', call_arg_list[1][0][0].command)
-      self.assertEquals({'user': 'hdfs', 'bin_dir': '/usr/bin', 'conf_dir': '/etc/hadoop/conf'}, call_arg_list[1][0][0].arguments)
-
-
-  @patch("resource_management.libraries.providers.execute_hadoop.ExecuteHadoopProvider")
-  def test_run_with_chmod(self, execute_hadoop_mock):
-    with Environment() as env:
-      CopyFromLocal('/user/testdir/*.files',
-        mode=0655,
-        owner='user1',
-        group='hdfs',
-        dest_dir='/apps/test/',
-        kinnit_if_needed='',
-        hdfs_user='hdfs'
-      )
-      self.assertEqual(execute_hadoop_mock.call_count, 3)
-      call_arg_list = execute_hadoop_mock.call_args_list
-      self.assertEqual('fs -copyFromLocal /user/testdir/*.files /apps/test/',
-                       call_arg_list[0][0][0].command)
-      self.assertEquals({'not_if': "/usr/bin/sudo su user1 -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]PATH=$PATH:/usr/bin hadoop fs -ls /apps/test//*.files'", 'bin_dir': '/usr/bin', 'user': 'user1', 'conf_dir': '/etc/hadoop/conf'},
-                        call_arg_list[0][0][0].arguments)
-      self.assertEquals('fs -chown user1:hdfs /apps/test//*.files', call_arg_list[1][0][0].command)
-      self.assertEquals({'user': 'hdfs', 'bin_dir': '/usr/bin', 'conf_dir': '/etc/hadoop/conf'}, call_arg_list[1][0][0].arguments)
-
-
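With TestCopyFromLocal.py deleted, local-to-HDFS copies are expressed
through the same batched resource via its source argument. A hedged
sketch, mirroring the updated HDFS service-check asserts above (paths
illustrative):

    # Queued copy of a local file into HDFS; applied by the next
    # HdfsResource(None, action=['execute']) flush.
    HdfsResource('/tmp/',
                 source = '/etc/passwd',
                 type = 'directory',
                 action = ['create_delayed'],
    )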

http://git-wip-us.apache.org/repos/asf/ambari/blob/df9e096f/ambari-agent/src/test/python/resource_management/TestPropertiesFileResource.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/python/resource_management/TestPropertiesFileResource.py b/ambari-agent/src/test/python/resource_management/TestPropertiesFileResource.py
index bdb64de..53511c0 100644
--- a/ambari-agent/src/test/python/resource_management/TestPropertiesFileResource.py
+++ b/ambari-agent/src/test/python/resource_management/TestPropertiesFileResource.py
@@ -65,7 +65,7 @@ class TestPropertiesFIleResource(TestCase):
                      properties={}
       )
 
-    create_file_mock.assert_called_with('/somewhere_in_system/one_file.properties', u'# Generated by Apache Ambari. Today is Wednesday\n    \n    \n')
+    create_file_mock.assert_called_with('/somewhere_in_system/one_file.properties', u'# Generated by Apache Ambari. Today is Wednesday\n    \n    ')
     ensure_mock.assert_called()
 
 
@@ -98,7 +98,7 @@ class TestPropertiesFIleResource(TestCase):
                      properties={},
       )
 
-    create_file_mock.assert_called_with('/dir/and/dir/file.txt', u'# Generated by Apache Ambari. Some other day\n    \n    \n')
+    create_file_mock.assert_called_with('/dir/and/dir/file.txt', u'# Generated by Apache Ambari. Some other day\n    \n    ')
     ensure_mock.assert_called()
 
 
@@ -131,7 +131,7 @@ class TestPropertiesFIleResource(TestCase):
                      properties={'property1': 'value1'},
       )
 
-    create_file_mock.assert_called_with('/dir/new_file', u'# Generated by Apache Ambari. 777\n    \nproperty1=value1\n    \n')
+    create_file_mock.assert_called_with('/dir/new_file', u'# Generated by Apache Ambari. 777\n    \nproperty1=value1\n    ')
     ensure_mock.assert_called()
 
 
@@ -169,7 +169,7 @@ class TestPropertiesFIleResource(TestCase):
                      },
       )
 
-    create_file_mock.assert_called_with('/dir/new_file', u"# Generated by Apache Ambari. 777\n    \n=\nprop.1='.'yyyy-MM-dd-HH\nprop.2=INFO, openjpa\nprop.3=%d{ISO8601} %5p %c{1}:%L - %m%n\nprop.4=${oozie.log.dir}/oozie.log\nprop.empty=\n    \n")
+    create_file_mock.assert_called_with('/dir/new_file', u"# Generated by Apache Ambari. 777\n    \n=\nprop.1='.'yyyy-MM-dd-HH\nprop.2=INFO, openjpa\nprop.3=%d{ISO8601} %5p %c{1}:%L - %m%n\nprop.4=${oozie.log.dir}/oozie.log\nprop.empty=\n    ")
     ensure_mock.assert_called()
 
 
@@ -206,5 +206,5 @@ class TestPropertiesFIleResource(TestCase):
       )
 
     read_file_mock.assert_called()
-    create_file_mock.assert_called_with('/dir1/new_file', u'# Generated by Apache Ambari. 777\n    \nproperty_1=value1\n    \n')
+    create_file_mock.assert_called_with('/dir1/new_file', u'# Generated by Apache Ambari. 777\n    \nproperty_1=value1\n    ')
     ensure_mock.assert_called()

http://git-wip-us.apache.org/repos/asf/ambari/blob/df9e096f/ambari-agent/src/test/python/resource_management/TestRepositoryResource.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/python/resource_management/TestRepositoryResource.py b/ambari-agent/src/test/python/resource_management/TestRepositoryResource.py
index 3b83c0f..224f8b4 100644
--- a/ambari-agent/src/test/python/resource_management/TestRepositoryResource.py
+++ b/ambari-agent/src/test/python/resource_management/TestRepositoryResource.py
@@ -144,7 +144,7 @@ class TestRepositoryResource(TestCase):
       template_content = template_item[1]['content'].get_content()
       
       self.assertEquals(template_name, '/tmp/1.txt')
-      self.assertEquals(template_content, 'deb http://download.base_url.org/rpm/ a b c\n')
+      self.assertEquals(template_content, 'deb http://download.base_url.org/rpm/ a b c')
       
       copy_item = str(file_mock.call_args_list[1])
       self.assertEqual(copy_item, "call('/etc/apt/sources.list.d/HDP.list', content=StaticFile('/tmp/1.txt'))")
@@ -179,7 +179,7 @@ class TestRepositoryResource(TestCase):
       template_content = template_item[1]['content'].get_content()
       
       self.assertEquals(template_name, '/tmp/1.txt')
-      self.assertEquals(template_content, 'deb http://download.base_url.org/rpm/ a b c\n')
+      self.assertEquals(template_content, 'deb http://download.base_url.org/rpm/ a b c')
       
       self.assertEqual(file_mock.call_count, 1)
       self.assertEqual(execute_mock.call_count, 0)

http://git-wip-us.apache.org/repos/asf/ambari/blob/df9e096f/ambari-agent/src/test/python/resource_management/TestXmlConfigResource.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/python/resource_management/TestXmlConfigResource.py b/ambari-agent/src/test/python/resource_management/TestXmlConfigResource.py
index 4affd31..f0264d6 100644
--- a/ambari-agent/src/test/python/resource_management/TestXmlConfigResource.py
+++ b/ambari-agent/src/test/python/resource_management/TestXmlConfigResource.py
@@ -62,7 +62,7 @@ class TestXmlConfigResource(TestCase):
                 configuration_attributes={}
                 )
 
-    create_file_mock.assert_called_with('/dir/conf/file.xml', u'<!--Wed 2014-02-->\n    <configuration>\n    \n  </configuration>\n')
+    create_file_mock.assert_called_with('/dir/conf/file.xml', '<!--Wed 2014-02-->\n    <configuration>\n    \n  </configuration>')
 
 
   @patch("resource_management.core.providers.system._ensure_metadata")
@@ -91,7 +91,7 @@ class TestXmlConfigResource(TestCase):
                 configuration_attributes={'attr': {'property1': 'attr_value'}}
                 )
 
-    create_file_mock.assert_called_with('/dir/conf/file.xml', u'<!--Wed 2014-02-->\n    <configuration>\n    \n    <property>\n      <name>property1</name>\n      <value>value1</value>\n      <attr>attr_value</attr>\n    </property>\n    \n  </configuration>\n')
+    create_file_mock.assert_called_with('/dir/conf/file.xml', '<!--Wed 2014-02-->\n    <configuration>\n    \n    <property>\n      <name>property1</name>\n      <value>value1</value>\n      <attr>attr_value</attr>\n    </property>\n    \n  </configuration>')
 
 
   @patch("resource_management.core.providers.system._ensure_metadata")
@@ -144,8 +144,8 @@ class TestXmlConfigResource(TestCase):
                     }
                 })
 
-    create_file_mock.assert_called_with('/dir/conf/file.xml', u'<!--Wed 2014-02-->\n    <configuration>\n    \n    <property>\n      <name></name>\n      <value></value>\n    </property>\n    \n    <property>\n      <name>prop.1</name>\n      <value>&#39;.&#39;yyyy-MM-dd-HH</value>\n      <attr1>x</attr1>\n    </property>\n    \n    <property>\n      <name>prop.2</name>\n      <value>INFO, openjpa</value>\n    </property>\n    \n    <property>\n      <name>prop.3</name>\n      <value>%d{ISO8601} %5p %c{1}:%L - %m%n</value>\n      <attr2>value3</attr2>\n    </property>\n    \n    <property>\n      <name>prop.4</name>\n      <value>${oozie.log.dir}/oozie.log</value>\n      <attr_value_empty></attr_value_empty>\n      <attr2>value4</attr2>\n    </property>\n    \n    <property>\n      <name>prop.empty</name>\n      <value></value>\n      <attr_value_empty></attr_value_empty>\n    </property>\n    \n  </configuration>\n')
-
+    create_file_mock.assert_called_with('/dir/conf/file.xml', '<!--Wed 2014-02-->\n    <configuration>\n    \n    <property>\n      <name></name>\n      <value></value>\n    </property>\n    \n    <property>\n      <name>prop.1</name>\n      <value>&#39;.&#39;yyyy-MM-dd-HH</value>\n      <attr1>x</attr1>\n    </property>\n    \n    <property>\n      <name>prop.2</name>\n      <value>INFO, openjpa</value>\n    </property>\n    \n    <property>\n      <name>prop.3</name>\n      <value>%d{ISO8601} %5p %c{1}:%L - %m%n</value>\n      <attr2>value3</attr2>\n    </property>\n    \n    <property>\n      <name>prop.4</name>\n      <value>${oozie.log.dir}/oozie.log</value>\n      <attr_value_empty></attr_value_empty>\n      <attr2>value4</attr2>\n    </property>\n    \n    <property>\n      <name>prop.empty</name>\n      <value></value>\n      <attr_value_empty></attr_value_empty>\n    </property>\n    \n  </configuration>')
+  
   @patch("resource_management.core.providers.system._ensure_metadata")
   @patch.object(sudo, "create_file")
   @patch.object(os.path, "exists")
@@ -177,7 +177,7 @@ class TestXmlConfigResource(TestCase):
                 configuration_attributes={}
                 )
 
-    create_file_mock.assert_called_with('/dir/conf/file.xml', u'<!--Wed 2014-02-->\n    <configuration>\n    \n    <property>\n      <name></name>\n      <value></value>\n    </property>\n    \n    <property>\n      <name>first</name>\n      <value>should be first</value>\n    </property>\n    \n    <property>\n      <name>second</name>\n      <value>should be second</value>\n    </property>\n    \n    <property>\n      <name>third</name>\n      <value>should be third</value>\n    </property>\n    \n    <property>\n      <name>z_last</name>\n      <value>should be last</value>\n    </property>\n    \n  </configuration>\n')
+    create_file_mock.assert_called_with('/dir/conf/file.xml', '<!--Wed 2014-02-->\n    <configuration>\n    \n    <property>\n      <name></name>\n      <value></value>\n    </property>\n    \n    <property>\n      <name>first</name>\n      <value>should be first</value>\n    </property>\n    \n    <property>\n      <name>second</name>\n      <value>should be second</value>\n    </property>\n    \n    <property>\n      <name>third</name>\n      <value>should be third</value>\n    </property>\n    \n    <property>\n      <name>z_last</name>\n      <value>should be last</value>\n    </property>\n    \n  </configuration>')
 
   @patch("resource_management.libraries.providers.xml_config.File")
   @patch.object(os.path, "exists")

http://git-wip-us.apache.org/repos/asf/ambari/blob/df9e096f/ambari-common/src/main/python/resource_management/core/source.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/core/source.py b/ambari-common/src/main/python/resource_management/core/source.py
index 22e1c6d..12fd9c8 100644
--- a/ambari-common/src/main/python/resource_management/core/source.py
+++ b/ambari-common/src/main/python/resource_management/core/source.py
@@ -124,7 +124,7 @@ else:
       self.context.update(variables)
       
       rendered = self.template.render(self.context)
-      return rendered + "\n" if not rendered.endswith('\n') else rendered
+      return rendered
     
   class InlineTemplate(Template):
     def __init__(self, name, extra_imports=[], **kwargs):
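
A minimal sketch of the change above, assuming jinja2 is available on the agent: the rendered template is now returned verbatim, so content without a trailing newline no longer gets one appended.

  from jinja2 import Template

  # before this patch a '\n' was appended when the rendered text lacked one;
  # now the rendered string is returned as-is
  print(repr(Template("key={{ value }}").render(value="1")))  # 'key=1'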

http://git-wip-us.apache.org/repos/asf/ambari/blob/df9e096f/ambari-common/src/main/python/resource_management/libraries/functions/__init__.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/__init__.py b/ambari-common/src/main/python/resource_management/libraries/functions/__init__.py
index 9b32b92..27606e2 100644
--- a/ambari-common/src/main/python/resource_management/libraries/functions/__init__.py
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/__init__.py
@@ -33,6 +33,7 @@ from resource_management.libraries.functions.get_port_from_url import *
 from resource_management.libraries.functions.hive_check import *
 from resource_management.libraries.functions.version import *
 from resource_management.libraries.functions.format_jvm_option import *
+from resource_management.libraries.functions.get_namenode_states import *
 
 IS_WINDOWS = platform.system() == "Windows"
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/df9e096f/ambari-common/src/main/python/resource_management/libraries/functions/get_namenode_states.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/get_namenode_states.py b/ambari-common/src/main/python/resource_management/libraries/functions/get_namenode_states.py
new file mode 100644
index 0000000..f1dcaa2
--- /dev/null
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/get_namenode_states.py
@@ -0,0 +1,51 @@
+from resource_management.libraries.script import UnknownConfiguration
+from resource_management.libraries.functions.jmx import get_value_from_jmx
+
+__all__ = ["get_namenode_states", "get_active_namenode"]
+
+HDFS_NN_STATE_ACTIVE = 'active'
+HDFS_NN_STATE_STANDBY = 'standby'
+
+NAMENODE_HTTP_FRAGMENT = 'dfs.namenode.http-address.{0}.{1}'
+JMX_URI_FRAGMENT = "http://{0}/jmx?qry=Hadoop:service=NameNode,name=NameNodeStatus"
+  
+def get_namenode_states(hdfs_site):
+  active_namenodes = []
+  standby_namenodes = []
+  unknown_namenodes = []
+  
+  name_service = hdfs_site['dfs.nameservices']
+  nn_unique_ids_key = 'dfs.ha.namenodes.' + name_service
+
+  # nn_unique_ids is something like 'nn1,nn2,nn3,nn4'
+  # map each id to dfs.namenode.http-address.[nameservice].[nn_unique_id],
+  # e.g. dfs.namenode.http-address.hacluster.nn1
+  nn_unique_ids = hdfs_site[nn_unique_ids_key].split(',')
+  for nn_unique_id in nn_unique_ids:
+    key = NAMENODE_HTTP_FRAGMENT.format(name_service,nn_unique_id)
+
+    if key in hdfs_site:
+      # use str() to ensure that unicode strings do not have the u' in them
+      value = str(hdfs_site[key])
+
+      try:
+        jmx_uri = JMX_URI_FRAGMENT.format(value)
+        state = get_value_from_jmx(jmx_uri,'State')
+
+        if state == HDFS_NN_STATE_ACTIVE:
+          active_namenodes.append(value)
+        elif state == HDFS_NN_STATE_STANDBY:
+          standby_namenodes.append(value)
+        else:
+          unknown_namenodes.append(value)
+      except Exception:
+        unknown_namenodes.append(value)
+        
+  return active_namenodes, standby_namenodes, unknown_namenodes
+
+def get_active_namenode(hdfs_site):
+  active_namenodes = get_namenode_states(hdfs_site)[0]
+  if active_namenodes:
+    return active_namenodes[0]
+  else:
+    return UnknownConfiguration('fs_root')
\ No newline at end of file
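
A usage sketch for the new helper, run against a hand-built hdfs-site dictionary (the host:port values are illustrative; get_value_from_jmx performs a real HTTP call, so without reachable namenodes everything lands in the unknown list):

  from resource_management.libraries.functions.get_namenode_states import (
    get_namenode_states, get_active_namenode)

  hdfs_site = {
    'dfs.nameservices': 'hacluster',
    'dfs.ha.namenodes.hacluster': 'nn1,nn2',
    'dfs.namenode.http-address.hacluster.nn1': 'c6401.ambari.apache.org:50070',
    'dfs.namenode.http-address.hacluster.nn2': 'c6402.ambari.apache.org:50070',
  }

  active, standby, unknown = get_namenode_states(hdfs_site)
  # get_active_namenode returns the first active host:port, or
  # UnknownConfiguration('fs_root') when no namenode reports active
  print(active, standby, unknown)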

http://git-wip-us.apache.org/repos/asf/ambari/blob/df9e096f/ambari-common/src/main/python/resource_management/libraries/functions/version.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/version.py b/ambari-common/src/main/python/resource_management/libraries/functions/version.py
index 1de6bf8..d9f20da 100644
--- a/ambari-common/src/main/python/resource_management/libraries/functions/version.py
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/version.py
@@ -19,7 +19,13 @@ limitations under the License.
 Ambari Agent
 
 """
+import os
 import re
+from resource_management.core import shell
+from resource_management.core.exceptions import Fail
+from resource_management.libraries.script.config_dictionary import UnknownConfiguration
+
+__all__ = ["format_hdp_stack_version", "compare_versions", "get_hdp_build_version"]
 
 def _normalize(v, desired_segments=0):
   """
@@ -70,4 +76,25 @@ def compare_versions(version1, version2):
   :return: Returns -1 if version1 is before version2, 0 if they are equal, and 1 if version1 is after version2
   """
   max_segments = max(len(version1.split(".")), len(version2.split(".")))
-  return cmp(_normalize(version1, desired_segments=max_segments), _normalize(version2, desired_segments=max_segments))
\ No newline at end of file
+  return cmp(_normalize(version1, desired_segments=max_segments), _normalize(version2, desired_segments=max_segments))
+
+
+def get_hdp_build_version(hdp_stack_version):
+  """
+  Returns the HDP build version (e.g. 2.2.0.0-2041) reported by hdp-select, for stacks >= 2.2
+  :param hdp_stack_version: stack version to check; only meaningful for stacks >= 2.2
+  :return: the parsed hdp_version, or UnknownConfiguration for stacks < 2.2
+  """
+  HDP_SELECT = "/usr/bin/hdp-select"
+  if hdp_stack_version != "" and compare_versions(hdp_stack_version, "2.2.0.0") >= 0 and os.path.exists(HDP_SELECT):
+    code, out = shell.call('{0} status'.format(HDP_SELECT))
+
+    matches = re.findall(r"([\d\.]+\-\d+)", out)
+    hdp_version = matches[0] if matches else None
+
+    if not hdp_version:
+      raise Fail("Could not parse HDP version from output of hdp-select: %s" % str(out))
+
+    return hdp_version
+  else:
+    return UnknownConfiguration('hdp_version')
\ No newline at end of file
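
A usage sketch for get_hdp_build_version; the parsed value is illustrative, and /usr/bin/hdp-select must exist on the host (otherwise, or for stacks below 2.2, UnknownConfiguration('hdp_version') is returned):

  from resource_management.libraries.functions.version import (
    compare_versions, get_hdp_build_version)

  hdp_stack_version = "2.2.0.0"
  if compare_versions(hdp_stack_version, "2.2.0.0") >= 0:
    # shells out to '/usr/bin/hdp-select status' and parses e.g. '2.2.0.0-2041'
    print(get_hdp_build_version(hdp_stack_version))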

http://git-wip-us.apache.org/repos/asf/ambari/blob/df9e096f/ambari-common/src/main/python/resource_management/libraries/providers/__init__.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/providers/__init__.py b/ambari-common/src/main/python/resource_management/libraries/providers/__init__.py
index 80e0a14..5d26e4a 100644
--- a/ambari-common/src/main/python/resource_management/libraries/providers/__init__.py
+++ b/ambari-common/src/main/python/resource_management/libraries/providers/__init__.py
@@ -39,7 +39,6 @@ PROVIDERS = dict(
     XmlConfig="resource_management.libraries.providers.xml_config.XmlConfigProvider",
     PropertiesFile="resource_management.libraries.providers.properties_file.PropertiesFileProvider",
     MonitorWebserver="resource_management.libraries.providers.monitor_webserver.MonitorWebserverProvider",
-    HdfsDirectory="resource_management.libraries.providers.hdfs_directory.HdfsDirectoryProvider",
-    CopyFromLocal="resource_management.libraries.providers.copy_from_local.CopyFromLocalProvider"
+    HdfsResource="resource_management.libraries.providers.hdfs_resource.HdfsResourceProvider",
   ),
 )

http://git-wip-us.apache.org/repos/asf/ambari/blob/df9e096f/ambari-common/src/main/python/resource_management/libraries/providers/copy_from_local.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/providers/copy_from_local.py b/ambari-common/src/main/python/resource_management/libraries/providers/copy_from_local.py
deleted file mode 100644
index dbd54a7..0000000
--- a/ambari-common/src/main/python/resource_management/libraries/providers/copy_from_local.py
+++ /dev/null
@@ -1,89 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-Ambari Agent
-
-"""
-
-import os
-from resource_management import *
-
-class CopyFromLocalProvider(Provider):
-  def action_run(self):
-
-    path = self.resource.path
-    dest_dir = self.resource.dest_dir
-    dest_file = self.resource.dest_file
-    kinnit_if_needed = self.resource.kinnit_if_needed
-    owner = self.resource.owner
-    group = self.resource.group
-    mode = self.resource.mode
-    hdfs_usr=self.resource.hdfs_user
-    hadoop_conf_path = self.resource.hadoop_conf_dir
-    bin_dir = self.resource.hadoop_bin_dir
-
-
-    if dest_file:
-      copy_cmd = format("fs -copyFromLocal {path} {dest_dir}/{dest_file}")
-      dest_path = dest_dir + dest_file if dest_dir.endswith(os.sep) else dest_dir + os.sep + dest_file
-    else:
-      dest_file_name = os.path.split(path)[1]
-      copy_cmd = format("fs -copyFromLocal {path} {dest_dir}")
-      dest_path = dest_dir + os.sep + dest_file_name
-    # Need to run unless as resource user
-    
-    if kinnit_if_needed:
-      Execute(kinnit_if_needed, 
-              user=owner,
-      )
-    
-    unless_cmd = as_user(format("PATH=$PATH:{bin_dir} hadoop fs -ls {dest_path}"), owner)
-
-    ExecuteHadoop(copy_cmd,
-                  not_if=unless_cmd,
-                  user=owner,
-                  bin_dir=bin_dir,
-                  conf_dir=hadoop_conf_path
-                  )
-
-    if not owner:
-      chown = None
-    else:
-      if not group:
-        chown = owner
-      else:
-        chown = format('{owner}:{group}')
-
-    if chown:
-      chown_cmd = format("fs -chown {chown} {dest_path}")
-
-      ExecuteHadoop(chown_cmd,
-                    user=hdfs_usr,
-                    bin_dir=bin_dir,
-                    conf_dir=hadoop_conf_path)
-    pass
-
-    if mode:
-      dir_mode = oct(mode)[1:]
-      chmod_cmd = format('fs -chmod {dir_mode} {dest_path}')
-
-      ExecuteHadoop(chmod_cmd,
-                    user=hdfs_usr,
-                    bin_dir=bin_dir,
-                    conf_dir=hadoop_conf_path)
-    pass

http://git-wip-us.apache.org/repos/asf/ambari/blob/df9e096f/ambari-common/src/main/python/resource_management/libraries/providers/hdfs_directory.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/providers/hdfs_directory.py b/ambari-common/src/main/python/resource_management/libraries/providers/hdfs_directory.py
deleted file mode 100644
index bfdb97d..0000000
--- a/ambari-common/src/main/python/resource_management/libraries/providers/hdfs_directory.py
+++ /dev/null
@@ -1,112 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-Ambari Agent
-
-"""
-import os
-
-from resource_management import *
-directories_list = [] #direcotries list for mkdir
-chmod_map = {} #(mode,recursive):dir_list map
-chown_map = {} #(owner,group,recursive):dir_list map
-class HdfsDirectoryProvider(Provider):
-  def action_create_delayed(self):
-    global delayed_directories
-    global chmod_map
-    global chown_map
-
-    if not self.resource.dir_name:
-      return
-
-    dir_name = self.resource.dir_name
-    dir_owner = self.resource.owner
-    dir_group = self.resource.group
-    dir_mode = oct(self.resource.mode)[1:] if self.resource.mode else None
-    directories_list.append(self.resource.dir_name)
-
-    recursive_chown_str = "-R" if self.resource.recursive_chown else ""
-    recursive_chmod_str = "-R" if self.resource.recursive_chmod else ""
-    # grouping directories by mode/owner/group to modify them in one 'chXXX' call
-    if dir_mode:
-      chmod_key = (dir_mode,recursive_chmod_str)
-      if chmod_map.has_key(chmod_key):
-        chmod_map[chmod_key].append(dir_name)
-      else:
-        chmod_map[chmod_key] = [dir_name]
-
-    if dir_owner:
-      owner_key = (dir_owner,dir_group,recursive_chown_str)
-      if chown_map.has_key(owner_key):
-        chown_map[owner_key].append(dir_name)
-      else:
-        chown_map[owner_key] = [dir_name]
-
-  def action_create(self):
-    global delayed_directories
-    global chmod_map
-    global chown_map
-
-    self.action_create_delayed()
-
-    hdp_conf_dir = self.resource.conf_dir
-    hdp_hdfs_user = self.resource.hdfs_user
-    secured = self.resource.security_enabled
-    keytab_file = self.resource.keytab
-    kinit_path = self.resource.kinit_path_local
-    bin_dir = self.resource.bin_dir
-
-    chmod_commands = []
-    chown_commands = []
-
-    for chmod_key, chmod_dirs in chmod_map.items():
-      mode = chmod_key[0]
-      recursive = chmod_key[1]
-      chmod_dirs_str = ' '.join(chmod_dirs)
-      chmod_commands.append(format("hadoop --config {hdp_conf_dir} fs -chmod {recursive} {mode} {chmod_dirs_str}"))
-
-    for chown_key, chown_dirs in chown_map.items():
-      owner = chown_key[0]
-      group = chown_key[1]
-      recursive = chown_key[2]
-      chown_dirs_str = ' '.join(chown_dirs)
-      if owner:
-        chown = owner
-        if group:
-          chown = format("{owner}:{group}")
-        chown_commands.append(format("hadoop --config {hdp_conf_dir} fs -chown {recursive} {chown} {chown_dirs_str}"))
-
-    if secured:
-        Execute(format("{kinit_path} -kt {keytab_file} {hdfs_principal_name}"),
-                user=hdp_hdfs_user)
-    #create all directories in one 'mkdir' call
-    dir_list_str = ' '.join(directories_list)
-    #for hadoop 2 we need to specify -p to create directories recursively
-    parent_flag = '`rpm -q hadoop | grep -q "hadoop-1" || echo "-p"`'
-
-    Execute(format('hadoop --config {hdp_conf_dir} fs -mkdir {parent_flag} {dir_list_str} && {chmod_cmd} && {chown_cmd}',
-                   chmod_cmd=' && '.join(chmod_commands),
-                   chown_cmd=' && '.join(chown_commands)),
-            user=hdp_hdfs_user,
-            path=bin_dir,
-            not_if=as_user(format("hadoop --config {hdp_conf_dir} fs -ls {dir_list_str}"), hdp_hdfs_user)
-    )
-
-    directories_list[:] = []
-    chmod_map.clear()
-    chown_map.clear()

http://git-wip-us.apache.org/repos/asf/ambari/blob/df9e096f/ambari-common/src/main/python/resource_management/libraries/providers/hdfs_resource.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/providers/hdfs_resource.py b/ambari-common/src/main/python/resource_management/libraries/providers/hdfs_resource.py
new file mode 100644
index 0000000..c031cbc
--- /dev/null
+++ b/ambari-common/src/main/python/resource_management/libraries/providers/hdfs_resource.py
@@ -0,0 +1,109 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+Ambari Agent
+
+"""
+import json
+from resource_management import *
+
+JSON_PATH = '/var/lib/ambari-agent/data/hdfs_resources.json'
+JAR_PATH = '/var/lib/ambari-agent/lib/fast-hdfs-resource.jar'
+
+RESOURCE_TO_JSON_FIELDS = {
+  'target': 'target',
+  'type': 'type',
+  'action': 'action',
+  'source': 'source',
+  'owner': 'owner',
+  'group': 'group',
+  'mode': 'mode',
+  'recursive_chown': 'recursiveChown',
+  'recursive_chmod': 'recursiveChmod'
+}
+
+
+class HdfsResourceProvider(Provider):
+  def action_delayed(self, action_name):
+    resource = {}
+    env = Environment.get_instance()
+    if 'hdfs_files' not in env.config:
+      env.config['hdfs_files'] = []
+
+    # Check required parameters
+    if not self.resource.type or not self.resource.action:
+      raise Fail("Resource parameter type or action is not set.")
+
+    # Put values in dictionary-resource
+    for field_name, json_field_name in RESOURCE_TO_JSON_FIELDS.iteritems():
+      if field_name == 'action':
+        resource[json_field_name] = action_name
+      elif field_name == 'mode' and self.resource.mode:
+        resource[json_field_name] = oct(self.resource.mode)[1:]
+      elif getattr(self.resource, field_name):
+        resource[json_field_name] = getattr(self.resource, field_name)
+
+    # Add resource to create
+    env.config['hdfs_files'].append(resource)
+
+  def action_create_delayed(self):
+    self.action_delayed("create")
+
+  def action_delete_delayed(self):
+    self.action_delayed("delete")
+
+  def action_execute(self):
+    env = Environment.get_instance()
+
+    # Check required parameters
+    if not self.resource.hadoop_fs or not self.resource.user:
+      raise Fail("Resource parameter hadoop_fs or user is not set.")
+
+    if 'hdfs_files' not in env.config or not env.config['hdfs_files']:
+      raise Fail("No resources to create. Please perform create_delayed"
+                 " or delete_delayed actions before the execute action.")
+
+    hadoop_fs = self.resource.hadoop_fs
+    hadoop_bin_dir = self.resource.hadoop_bin_dir
+    hadoop_conf_dir = self.resource.hadoop_conf_dir
+    user = self.resource.user
+    security_enabled = self.resource.security_enabled
+    keytab_file = self.resource.keytab
+    kinit_path = self.resource.kinit_path_local
+    logoutput = self.resource.logoutput
+    jar_path = JAR_PATH
+    json_path = JSON_PATH
+
+    if security_enabled:
+      Execute(format("{kinit_path} -kt {keytab_file} {hdfs_principal_name}"),
+              user=user
+      )
+
+    # Write json file to disk
+    with open(JSON_PATH, 'w') as f:
+      f.write(json.dumps(env.config['hdfs_files']))
+
+    # Execute jar to create/delete resources in hadoop
+    Execute(format("hadoop --config {hadoop_conf_dir} jar {jar_path} {json_path} {hadoop_fs}"),
+            user=user,
+            path=[hadoop_bin_dir],
+            logoutput=logoutput,
+    )
+
+    # Clean
+    env.config['hdfs_files'] = []
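
For reference, a sketch of what action_execute above produces, with illustrative paths and owners (the mode strings match the oct(mode)[1:] serialization); the jar then applies the whole batch over a single client connection:

  import json

  hdfs_files = [
    {"target": "/tmp", "type": "directory", "action": "create",
     "owner": "hdfs", "mode": "777"},
    {"target": "/user/ambari-qa", "type": "directory", "action": "create",
     "owner": "ambari-qa", "mode": "770"},
  ]
  with open('/var/lib/ambari-agent/data/hdfs_resources.json', 'w') as f:
    f.write(json.dumps(hdfs_files))
  # then, as the hdfs user:
  #   hadoop --config /etc/hadoop/conf jar \
  #     /var/lib/ambari-agent/lib/fast-hdfs-resource.jar \
  #     /var/lib/ambari-agent/data/hdfs_resources.json \
  #     hdfs://c6401.ambari.apache.org:8020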

http://git-wip-us.apache.org/repos/asf/ambari/blob/df9e096f/ambari-common/src/main/python/resource_management/libraries/resources/__init__.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/resources/__init__.py b/ambari-common/src/main/python/resource_management/libraries/resources/__init__.py
index 24b497c..909d7b8 100644
--- a/ambari-common/src/main/python/resource_management/libraries/resources/__init__.py
+++ b/ambari-common/src/main/python/resource_management/libraries/resources/__init__.py
@@ -26,5 +26,4 @@ from resource_management.libraries.resources.xml_config import *
 from resource_management.libraries.resources.properties_file import *
 from resource_management.libraries.resources.repository import *
 from resource_management.libraries.resources.monitor_webserver import *
-from resource_management.libraries.resources.hdfs_directory import *
-from resource_management.libraries.resources.copy_from_local import *
\ No newline at end of file
+from resource_management.libraries.resources.hdfs_resource import *

http://git-wip-us.apache.org/repos/asf/ambari/blob/df9e096f/ambari-common/src/main/python/resource_management/libraries/resources/copy_from_local.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/resources/copy_from_local.py b/ambari-common/src/main/python/resource_management/libraries/resources/copy_from_local.py
deleted file mode 100644
index 54d003e..0000000
--- a/ambari-common/src/main/python/resource_management/libraries/resources/copy_from_local.py
+++ /dev/null
@@ -1,40 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-Ambari Agent
-
-"""
-
-_all__ = ["CopyFromLocal"]
-from resource_management.core.base import Resource, ForcedListArgument, ResourceArgument, BooleanArgument
-
-class CopyFromLocal(Resource):
-  action = ForcedListArgument(default="run")
-
-  path = ResourceArgument(default=lambda obj: obj.name)
-  dest_dir = ResourceArgument(required=True)
-  dest_file = ResourceArgument()
-  owner = ResourceArgument(required=True)
-  group = ResourceArgument()
-  mode = ResourceArgument()
-  kinnit_if_needed = ResourceArgument(default='')
-  hadoop_conf_dir = ResourceArgument(default='/etc/hadoop/conf')
-  hdfs_user = ResourceArgument(default='hdfs')
-  hadoop_bin_dir = ResourceArgument(default='/usr/bin')
-
-  actions = Resource.actions + ["run"]

http://git-wip-us.apache.org/repos/asf/ambari/blob/df9e096f/ambari-common/src/main/python/resource_management/libraries/resources/hdfs_directory.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/resources/hdfs_directory.py b/ambari-common/src/main/python/resource_management/libraries/resources/hdfs_directory.py
deleted file mode 100644
index 7888cd8..0000000
--- a/ambari-common/src/main/python/resource_management/libraries/resources/hdfs_directory.py
+++ /dev/null
@@ -1,45 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-Ambari Agent
-
-"""
-
-_all__ = ["HdfsDirectory"]
-from resource_management.core.base import Resource, ForcedListArgument, ResourceArgument, BooleanArgument
-
-class HdfsDirectory(Resource):
-  action = ForcedListArgument()
-
-  dir_name = ResourceArgument(default=lambda obj: obj.name)
-  owner = ResourceArgument()
-  group = ResourceArgument()
-  mode = ResourceArgument()
-  recursive_chown = BooleanArgument(default=False)
-  recursive_chmod = BooleanArgument(default=False)
-
-  conf_dir = ResourceArgument()
-  security_enabled = BooleanArgument(default=False)
-  keytab = ResourceArgument()
-  kinit_path_local = ResourceArgument()
-  hdfs_user = ResourceArgument()
-  bin_dir = ResourceArgument(default="")
-
-  #action 'create' immediately creates all pending directory in efficient manner
-  #action 'create_delayed' add directory to list of pending directories
-  actions = Resource.actions + ["create","create_delayed"]

http://git-wip-us.apache.org/repos/asf/ambari/blob/df9e096f/ambari-common/src/main/python/resource_management/libraries/resources/hdfs_resource.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/resources/hdfs_resource.py b/ambari-common/src/main/python/resource_management/libraries/resources/hdfs_resource.py
new file mode 100644
index 0000000..21d63ae
--- /dev/null
+++ b/ambari-common/src/main/python/resource_management/libraries/resources/hdfs_resource.py
@@ -0,0 +1,77 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+Ambari Agent
+
+"""
+
+__all__ = ["HdfsResource"]
+from resource_management.core.base import Resource, ForcedListArgument, ResourceArgument, BooleanArgument
+
+"""
+Calling a lot of hadoop fs commands takes too much time:
+for every call a new connection to the namenode and datanodes is initialized.
+
+Instead, this resource gathers the directories/files to create/delete/copyFromLocal,
+then performs all of them in a single call.
+
+action = create_delayed / delete_delayed gathers information about what you want
+to create or delete.
+
+After everything is gathered, run action = execute to perform the delayed actions.
+
+The resource is a replacement for the following operations:
+  1) hadoop fs -rmr
+  2) hadoop fs -copyFromLocal
+  3) hadoop fs -put
+  4) hadoop fs -mkdir
+  5) hadoop fs -touchz
+  6) hadoop fs -chmod
+  7) hadoop fs -chown
+"""
+
+
+class HdfsResource(Resource):
+  # Required: {target, type, action}
+  # path to hadoop file/directory
+  target = ResourceArgument(default=lambda obj: obj.name)
+  # "directory" or "file"
+  type = ResourceArgument()
+  # "create_delayed" or "delete_delayed" or "execute"
+  action = ForcedListArgument()
+  # Required for action="execute", path to fs like "hdfs://c6401.ambari.apache.org:8020"
+  hadoop_fs = ResourceArgument()
+  # if present - copies file/directory from local path {source} to hadoop path - {target}
+  source = ResourceArgument()
+  owner = ResourceArgument()
+  group = ResourceArgument()
+  mode = ResourceArgument()
+  logoutput = ResourceArgument()
+  recursive_chown = BooleanArgument(default=False)
+  recursive_chmod = BooleanArgument(default=False)
+
+  security_enabled = BooleanArgument(default=False)
+  keytab = ResourceArgument()
+  kinit_path_local = ResourceArgument()
+  user = ResourceArgument()
+  hadoop_bin_dir = ResourceArgument()
+  hadoop_conf_dir = ResourceArgument()
+
+  # action 'execute' immediately creates/deletes all pending files/directories in an efficient manner
+  # action 'create_delayed'/'delete_delayed' adds a file/directory to the list of pending operations
+  actions = Resource.actions + ["create_delayed", "delete_delayed", "execute"]
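
A usage sketch of the delayed/execute pattern (paths, users and the fs URI are illustrative; this must run inside a resource_management Environment, and in the service scripts below the common arguments are pre-bound via functools.partial):

  from resource_management.libraries.resources.hdfs_resource import HdfsResource

  HdfsResource("/apps/tez",
               type="directory",
               action="create_delayed",
               owner="tez",
               mode=0755,
  )
  HdfsResource("/apps/tez/lib/tez.tar.gz",
               type="file",
               action="create_delayed",
               source="/usr/lib/tez/tez.tar.gz",
               owner="tez",
  )
  # nothing has touched HDFS yet; one call performs the whole batch
  HdfsResource(None,
               action="execute",
               user="hdfs",
               hadoop_fs="hdfs://c6401.ambari.apache.org:8020",
               hadoop_bin_dir="/usr/bin",
               hadoop_conf_dir="/etc/hadoop/conf",
  )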

http://git-wip-us.apache.org/repos/asf/ambari/blob/df9e096f/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon.py b/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon.py
index a903e5c..ce4b015 100644
--- a/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon.py
+++ b/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon.py
@@ -84,17 +84,19 @@ def falcon(type, action = None):
   if type == 'server':
     if action == 'config':
       if params.store_uri[0:4] == "hdfs":
-        params.HdfsDirectory(params.store_uri,
+        params.HdfsResource(params.store_uri,
+                             type="file",
                              action="create_delayed",
                              owner=params.falcon_user,
                              mode=0755
         )
-      params.HdfsDirectory(params.flacon_apps_dir,
+      params.HdfsResource(params.flacon_apps_dir,
+                           type="directory",
                            action="create_delayed",
                            owner=params.falcon_user,
                            mode=0777#TODO change to proper mode
       )
-      params.HdfsDirectory(None, action="create")
+      params.HdfsResource(None, action="execute")
       Directory(params.falcon_local_dir,
                 owner=params.falcon_user,
                 recursive=True,

http://git-wip-us.apache.org/repos/asf/ambari/blob/df9e096f/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/params.py b/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/params.py
index 82cd470..ac422c4 100644
--- a/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/params.py
@@ -90,15 +90,17 @@ hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
 hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
 hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
 kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
+fs_root = config['configurations']['core-site']['fs.defaultFS']
 import functools
-#create partial functions with common arguments for every HdfsDirectory call
-#to create hdfs directory we need to call params.HdfsDirectory in code
-HdfsDirectory = functools.partial(
-  HdfsDirectory,
-  conf_dir=hadoop_conf_dir,
-  hdfs_user=hdfs_user,
+#create partial functions with common arguments for every HdfsResource call
+#to create hdfs directory we need to call params.HdfsResource in code
+HdfsResource = functools.partial(
+  HdfsResource,
+  user=hdfs_user,
   security_enabled = security_enabled,
   keytab = hdfs_user_keytab,
   kinit_path_local = kinit_path_local,
-  bin_dir = hadoop_bin_dir
+  hadoop_fs=fs_root,
+  hadoop_bin_dir = hadoop_bin_dir,
+  hadoop_conf_dir = hadoop_conf_dir
 )
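
The functools.partial pattern above pre-binds the cluster-wide arguments once so call sites only pass what varies per path; a minimal standalone sketch (hdfs_resource is a hypothetical stand-in for the real resource):

  import functools

  def hdfs_resource(target, user=None, hadoop_fs=None, **kwargs):
    print(target, user, hadoop_fs, kwargs)

  # bind the arguments that are the same for every call
  HdfsResource = functools.partial(
    hdfs_resource,
    user="hdfs",
    hadoop_fs="hdfs://c6401.ambari.apache.org:8020",
  )

  # call sites now read like the service scripts in this patch
  HdfsResource("/apps/falcon", type="directory", action="create_delayed", mode=0755)
  HdfsResource(None, action="execute")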

http://git-wip-us.apache.org/repos/asf/ambari/blob/df9e096f/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase.py b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase.py
index ea99288..42de66d 100644
--- a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase.py
+++ b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase.py
@@ -135,17 +135,19 @@ def hbase(name=None # 'master' or 'regionserver' or 'client'
       group=params.user_group,
       owner=params.hbase_user
     )
-  if name in ["master","regionserver"]:
-    params.HdfsDirectory(params.hbase_hdfs_root_dir,
+  if name == "master":
+    params.HdfsResource(params.hbase_hdfs_root_dir,
+                         type="directory",
                          action="create_delayed",
                          owner=params.hbase_user
     )
-    params.HdfsDirectory(params.hbase_staging_dir,
+    params.HdfsResource(params.hbase_staging_dir,
+                         type="directory",
                          action="create_delayed",
                          owner=params.hbase_user,
                          mode=0711
     )
-    params.HdfsDirectory(None, action="create")
+    params.HdfsResource(None, action="execute")
 
 def hbase_TemplateConfig(name, 
                          tag=None

http://git-wip-us.apache.org/repos/asf/ambari/blob/df9e096f/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params.py
index 1f970fc..23c2f6c 100644
--- a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params.py
@@ -137,17 +137,19 @@ hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
 hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
 hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
 kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
+fs_root = config['configurations']['core-site']['fs.defaultFS']
 import functools
-#create partial functions with common arguments for every HdfsDirectory call
-#to create hdfs directory we need to call params.HdfsDirectory in code
-HdfsDirectory = functools.partial(
-  HdfsDirectory,
-  conf_dir=hadoop_conf_dir,
-  hdfs_user=hdfs_user,
+#create partial functions with common arguments for every HdfsResource call
+#to create hdfs directory we need to call params.HdfsResource in code
+HdfsResource = functools.partial(
+  HdfsResource,
+  user=hdfs_user,
   security_enabled = security_enabled,
   keytab = hdfs_user_keytab,
   kinit_path_local = kinit_path_local,
-  bin_dir = hadoop_bin_dir
+  hadoop_fs=fs_root,
+  hadoop_bin_dir = hadoop_bin_dir,
+  hadoop_conf_dir = hadoop_conf_dir
 )
 
 if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0:

http://git-wip-us.apache.org/repos/asf/ambari/blob/df9e096f/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/service_check.py b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/service_check.py
index 15a306b..96a93ad 100644
--- a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/service_check.py
+++ b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/service_check.py
@@ -26,9 +26,8 @@ class HbaseServiceCheck(Script):
   def service_check(self, env):
     import params
     env.set_params(params)
-    
+
     output_file = "/apps/hbase/data/ambarismoketest"
-    test_cmd = format("fs -test -e {output_file}")
     smokeuser_kinit_cmd = format("{kinit_path_local} -kt {smoke_user_keytab} {smoke_test_user};") if params.security_enabled else ""
     hbase_servicecheck_file = format("{exec_tmp_dir}/hbase-smoke.sh")
   

http://git-wip-us.apache.org/repos/asf/ambari/blob/df9e096f/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/files/fast-hdfs-resource.jar
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/files/fast-hdfs-resource.jar b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/files/fast-hdfs-resource.jar
new file mode 100644
index 0000000..51b022a
Binary files /dev/null and b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/files/fast-hdfs-resource.jar differ

http://git-wip-us.apache.org/repos/asf/ambari/blob/df9e096f/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs.py
index 25c1067..f4017c1 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs.py
@@ -78,6 +78,11 @@ def hdfs(name=None):
        owner=tc_owner,
        content=Template("slaves.j2")
   )
+
+  # for the source code of this jar, see contrib/fast-hdfs-resource
+  File(format("{ambari_libs_dir}/fast-hdfs-resource.jar"),
+       content=StaticFile("fast-hdfs-resource.jar")
+  )
   
   if params.lzo_enabled:
     Package(params.lzo_packages_for_current_host)

http://git-wip-us.apache.org/repos/asf/ambari/blob/df9e096f/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_namenode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_namenode.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_namenode.py
index 2029aac..86bcb4a 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_namenode.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_namenode.py
@@ -135,18 +135,21 @@ def create_name_dirs(directories):
 def create_hdfs_directories(check):
   import params
 
-  params.HdfsDirectory("/tmp",
+  params.HdfsResource("/tmp",
+                       type="directory",
                        action="create_delayed",
                        owner=params.hdfs_user,
                        mode=0777
   )
-  params.HdfsDirectory(params.smoke_hdfs_user_dir,
+  params.HdfsResource(params.smoke_hdfs_user_dir,
+                       type="directory",
                        action="create_delayed",
                        owner=params.smoke_user,
                        mode=params.smoke_hdfs_user_mode
   )
-  params.HdfsDirectory(None, action="create",
-                       only_if=check #skip creation when HA not active
+  params.HdfsResource(None, 
+                      action="execute",
+                      only_if=check #skip creation when HA not active
   )
 
 def format_namenode(force=None):

http://git-wip-us.apache.org/repos/asf/ambari/blob/df9e096f/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_snamenode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_snamenode.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_snamenode.py
index c650c4d..f6805a2 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_snamenode.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_snamenode.py
@@ -19,8 +19,6 @@ limitations under the License.
 
 from resource_management import *
 from utils import service
-from utils import hdfs_directory
-
 
 def snamenode(action=None, format=False):
   import params

http://git-wip-us.apache.org/repos/asf/ambari/blob/df9e096f/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params.py
index 1ac4446..3a9d074 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params.py
@@ -81,6 +81,8 @@ else:
 hadoop_conf_dir = "/etc/hadoop/conf"
 hadoop_conf_empty_dir = "/etc/hadoop/conf.empty"
 limits_conf_dir = "/etc/security/limits.d"
+# Path to which fast-hdfs-resource.jar will be installed
+ambari_libs_dir = "/var/lib/ambari-agent/lib"
 
 execute_path = os.environ['PATH'] + os.pathsep + hadoop_bin_dir
 ulimit_cmd = "ulimit -c unlimited && "
@@ -224,19 +226,22 @@ if security_enabled:
   nn_kinit_cmd = format("{kinit_path_local} -kt {_nn_keytab} {_nn_principal_name};")  
 else:
   dn_kinit_cmd = ""
-  nn_kinit_cmd = ""  
+  nn_kinit_cmd = ""
+
+fs_root = config['configurations']['core-site']['fs.defaultFS']
 
 import functools
-#create partial functions with common arguments for every HdfsDirectory call
-#to create hdfs directory we need to call params.HdfsDirectory in code
-HdfsDirectory = functools.partial(
-  HdfsDirectory,
-  conf_dir=hadoop_conf_dir,
-  hdfs_user=hdfs_user,
+#create partial functions with common arguments for every HdfsResource call
+#to create hdfs directory we need to call params.HdfsResource in code
+HdfsResource = functools.partial(
+  HdfsResource,
+  user=hdfs_user,
   security_enabled = security_enabled,
   keytab = hdfs_user_keytab,
   kinit_path_local = kinit_path_local,
-  bin_dir = hadoop_bin_dir
+  hadoop_fs=fs_root,
+  hadoop_bin_dir = hadoop_bin_dir,
+  hadoop_conf_dir = hadoop_conf_dir
 )
 
 io_compression_codecs = config['configurations']['core-site']['io.compression.codecs']

http://git-wip-us.apache.org/repos/asf/ambari/blob/df9e096f/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/service_check.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/service_check.py
index 3dc3a1b..c009ffb 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/service_check.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/service_check.py
@@ -31,15 +31,6 @@ class HdfsServiceCheck(Script):
 
     safemode_command = "dfsadmin -safemode get | grep OFF"
 
-    create_dir_cmd = format("fs -mkdir {dir}")
-    chmod_command = format("fs -chmod 777 {dir}")
-    test_dir_exists = as_user(format("{hadoop_bin_dir}/hadoop --config {hadoop_conf_dir} fs -test -e {dir}"), params.smoke_user)
-    cleanup_cmd = format("fs -rm {tmp_file}")
-    #cleanup put below to handle retries; if retrying there wil be a stale file
-    #that needs cleanup; exit code is fn of second command
-    create_file_cmd = format(
-      "{cleanup_cmd}; hadoop --config {hadoop_conf_dir} fs -put /etc/passwd {tmp_file}")
-    test_cmd = format("fs -test -e {tmp_file}")
     if params.security_enabled:
       Execute(format("{kinit_path_local} -kt {smoke_user_keytab} {smoke_user}"),
         user=params.smoke_user
@@ -52,39 +43,24 @@ class HdfsServiceCheck(Script):
                   tries=20,
                   bin_dir=params.hadoop_bin_dir
     )
-    ExecuteHadoop(create_dir_cmd,
-                  user=params.smoke_user,
-                  logoutput=True,
-                  not_if=test_dir_exists,
-                  conf_dir=params.hadoop_conf_dir,
-                  try_sleep=3,
-                  tries=5,
-                  bin_dir=params.hadoop_bin_dir
-    )
-    ExecuteHadoop(chmod_command,
-                  user=params.smoke_user,
-                  logoutput=True,
-                  conf_dir=params.hadoop_conf_dir,
-                  try_sleep=3,
-                  tries=5,
-                  bin_dir=params.hadoop_bin_dir
+    params.HdfsResource(dir,
+                        type="directory",
+                        action="create_delayed",
+                        mode=0777
     )
-    ExecuteHadoop(create_file_cmd,
-                  user=params.smoke_user,
-                  logoutput=True,
-                  conf_dir=params.hadoop_conf_dir,
-                  try_sleep=3,
-                  tries=5,
-                  bin_dir=params.hadoop_bin_dir
+    #cleanup put below to handle retries; if retrying there will be a stale file
+    #that needs cleanup; exit code is fn of second command
+    params.HdfsResource(tmp_file,
+                        type="file",
+                        action="delete_delayed",
     )
-    ExecuteHadoop(test_cmd,
-                  user=params.smoke_user,
-                  logoutput=True,
-                  conf_dir=params.hadoop_conf_dir,
-                  try_sleep=3,
-                  tries=5,
-                  bin_dir=params.hadoop_bin_dir
+    params.HdfsResource(tmp_file,
+                        type="file",
+                        source="/etc/passwd",
+                        action="create_delayed"
     )
+    params.HdfsResource(None, action="execute")
+
     if params.has_journalnode_hosts:
       journalnode_port = params.journalnode_port
       checkWebUIFileName = "checkWebUI.py"

http://git-wip-us.apache.org/repos/asf/ambari/blob/df9e096f/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/files/templetonSmoke.sh
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/files/templetonSmoke.sh b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/files/templetonSmoke.sh
index 22202ee..e4afd43 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/files/templetonSmoke.sh
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/files/templetonSmoke.sh
@@ -20,13 +20,23 @@
 #
 #
 
+function getValueFromField {
+  xmllint $1 | grep "<name>$2</name>" -C 2 | grep '<value>' | cut -d ">" -f2 | cut -d "<" -f1
+  return $?
+}
+
 export ttonhost=$1
 export smoke_test_user=$2
 export smoke_user_keytab=$3
 export security_enabled=$4
 export kinit_path_local=$5
+export hadoop_conf_dir=$6
 export ttonurl="http://${ttonhost}:50111/templeton/v1"
 
+export NAMENODE=`getValueFromField ${hadoop_conf_dir}/core-site.xml fs.defaultFS`
+export JSON_PATH='/var/lib/ambari-agent/data/hdfs_resources.json'
+export JAR_PATH='/var/lib/ambari-agent/lib/fast-hdfs-resource.jar'
+
 if [[ $security_enabled == "true" ]]; then
   kinitcmd="${kinit_path_local}  -kt ${smoke_user_keytab} ${smoke_test_user}; "
 else
@@ -74,11 +84,25 @@ echo "A = load '$ttonTestInput' using PigStorage(':');"  > /tmp/$ttonTestScript
 echo "B = foreach A generate \$0 as id; " >> /tmp/$ttonTestScript
 echo "store B into '$ttonTestOutput';" >> /tmp/$ttonTestScript
 
-#copy pig script to hdfs
-sudo su ${smoke_test_user} -s /bin/bash - -c "hadoop dfs -copyFromLocal /tmp/$ttonTestScript /tmp/$ttonTestScript"
+cat >$JSON_PATH<<EOF
+[{
+	"target":"/tmp/${ttonTestScript}",
+	"type":"file",
+	"action":"create",
+	"source":"/tmp/${ttonTestScript}"
+},
+{
+	"target":"${ttonTestInput}",
+	"type":"file",
+	"action":"create",
+	"source":"/etc/passwd"
+}]
+EOF
 
+#copy pig script to hdfs
 #copy input file to hdfs
-sudo su ${smoke_test_user} -s /bin/bash - -c "hadoop dfs -copyFromLocal /etc/passwd $ttonTestInput"
+echo "About to run: hadoop --config ${hadoop_conf_dir} jar ${JAR_PATH} ${JSON_PATH} ${NAMENODE}"
+sudo su ${smoke_test_user} -s /bin/bash - -c "hadoop --config ${hadoop_conf_dir} jar ${JAR_PATH} ${JSON_PATH} ${NAMENODE}"
 
 #create, copy post args file
 echo -n "user.name=${smoke_test_user}&file=/tmp/$ttonTestScript" > /tmp/pig_post.txt

http://git-wip-us.apache.org/repos/asf/ambari/blob/df9e096f/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py
index 9dccb28..6fd2e93 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py
@@ -19,6 +19,8 @@ limitations under the License.
 """
 
 from resource_management import *
+from install_jars import install_tez_jars
+import glob
 import sys
 import os
 
@@ -27,18 +29,96 @@ def hive(name=None):
   import params
 
   if name == 'hiveserver2':
+    if params.hdp_stack_version_major != "" and compare_versions(params.hdp_stack_version_major, '2.2') >=0:
+      params.HdfsResource(InlineTemplate(params.mapreduce_tar_destination).get_content(),
+                          type="file",
+                          action="create_delayed",
+                          source=params.mapreduce_tar_source,
+                          group=params.user_group,
+                          mode=params.tarballs_mode
+      )
+      params.HdfsResource(InlineTemplate(params.tez_tar_destination).get_content(),
+                          type="file",
+                          action="create_delayed",
+                          source=params.tez_tar_source,
+                          group=params.user_group,
+                          mode=params.tarballs_mode
+      )
+    else:
+      install_tez_jars()
+        
+    if params.hdp_stack_version_major != "" and compare_versions(params.hdp_stack_version_major, "2.2.0.0") < 0:
+      params.HdfsResource(params.webhcat_apps_dir,
+                           type="directory",
+                           action="create_delayed",
+                           owner=params.webhcat_user,
+                           mode=0755
+      )
+  
+    if params.hcat_hdfs_user_dir != params.webhcat_hdfs_user_dir:
+      params.HdfsResource(params.hcat_hdfs_user_dir,
+                           type="directory",
+                           action="create_delayed",
+                           owner=params.hcat_user,
+                           mode=params.hcat_hdfs_user_mode
+      )
+    params.HdfsResource(params.webhcat_hdfs_user_dir,
+                         type="directory",
+                         action="create_delayed",
+                         owner=params.webhcat_user,
+                         mode=params.webhcat_hdfs_user_mode
+    )
+  
+    for src_filepath in glob.glob(params.hadoop_streaming_tar_source):
+      src_filename = os.path.basename(src_filepath)
+      params.HdfsResource(InlineTemplate(params.hadoop_streaming_tar_destination_dir).get_content() + '/' + src_filename,
+                          type="file",
+                          action="create_delayed",
+                          source=src_filepath,
+                          group=params.user_group,
+                          mode=params.tarballs_mode
+      )
+  
+    if (os.path.isfile(params.pig_tar_source)):
+      params.HdfsResource(InlineTemplate(params.pig_tar_destination).get_content(),
+                          type="file",
+                          action="create_delayed",
+                          source=params.pig_tar_source,
+                          group=params.user_group,
+                          mode=params.tarballs_mode
+      )
+  
+    params.HdfsResource(InlineTemplate(params.hive_tar_destination).get_content(),
+                        type="file",
+                        action="create_delayed",
+                        source=params.hive_tar_source,
+                        group=params.user_group,
+                        mode=params.tarballs_mode
+    )
 
-    params.HdfsDirectory(params.hive_apps_whs_dir,
+    for src_filepath in glob.glob(params.sqoop_tar_source):
+      src_filename = os.path.basename(src_filepath)
+      params.HdfsResource(InlineTemplate(params.sqoop_tar_destination_dir).get_content() + '/' + src_filename,
+                          type="file",
+                          action="create_delayed",
+                          source=src_filepath,
+                          group=params.user_group,
+                          mode=params.tarballs_mode
+      )
+      
+    params.HdfsResource(params.hive_apps_whs_dir,
+                         type="directory",
                          action="create_delayed",
                          owner=params.hive_user,
                          mode=0777
     )
-    params.HdfsDirectory(params.hive_hdfs_user_dir,
+    params.HdfsResource(params.hive_hdfs_user_dir,
+                         type="directory",
                          action="create_delayed",
                          owner=params.hive_user,
                          mode=params.hive_hdfs_user_mode
     )
-    params.HdfsDirectory(None, action="create")
+    params.HdfsResource(None, action="execute")
 
   Directory(params.hive_conf_dir_prefix,
             mode=0755

http://git-wip-us.apache.org/repos/asf/ambari/blob/df9e096f/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py
index 12efae8..735d59d 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py
@@ -22,8 +22,8 @@ import hive_server_upgrade
 from resource_management import *
 from hive import hive
 from hive_service import hive_service
-from resource_management.libraries.functions.dynamic_variable_interpretation import copy_tarballs_to_hdfs
 from install_jars import install_tez_jars
+from resource_management.libraries.functions.version import compare_versions
 
 class HiveServer(Script):
 
@@ -35,7 +35,8 @@ class HiveServer(Script):
   def configure(self, env):
     import params
     env.set_params(params)
-    if not (params.hdp_stack_version != "" and compare_versions(params.hdp_stack_version, '2.2') >=0):
+    
+    if not (params.hdp_stack_version_major != "" and compare_versions(params.hdp_stack_version_major, '2.2') >=0):
       install_tez_jars()
 
     hive(name='hiveserver2')
@@ -46,10 +47,6 @@ class HiveServer(Script):
     env.set_params(params)
     self.configure(env) # FOR SECURITY
 
-    # This function is needed in HDP 2.2, but it is safe to call in earlier versions.
-    copy_tarballs_to_hdfs('mapreduce', params.tez_user, params.hdfs_user, params.user_group)
-    copy_tarballs_to_hdfs('tez', params.tez_user, params.hdfs_user, params.user_group)
-
     hive_service( 'hiveserver2', action = 'start',
       rolling_restart=rolling_restart )
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/df9e096f/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/install_jars.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/install_jars.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/install_jars.py
index a18ca72..16c63a4 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/install_jars.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/install_jars.py
@@ -20,6 +20,7 @@ limitations under the License.
 
 from resource_management import *
 import os
+import glob
 
 def install_tez_jars():
   import params
@@ -29,25 +30,12 @@ def install_tez_jars():
   # If tez libraries are to be stored in hdfs
   if destination_hdfs_dirs:
     for hdfs_dir in destination_hdfs_dirs:
-      params.HdfsDirectory(hdfs_dir,
+      params.HdfsResource(hdfs_dir,
+                           type="directory",
                            action="create_delayed",
                            owner=params.tez_user,
                            mode=0755
       )
-    pass
-    params.HdfsDirectory(None, action="create")
-
-    if params.security_enabled:
-      kinit_if_needed = format("{kinit_path_local} -kt {hdfs_user_keytab} {hdfs_principal_name};")
-    else:
-      kinit_if_needed = ""
-
-    if kinit_if_needed:
-      Execute(kinit_if_needed,
-              user=params.tez_user,
-              path='/bin'
-      )
-    pass
 
     app_dir_path = None
     lib_dir_path = None
@@ -61,30 +49,25 @@ def install_tez_jars():
         pass
       pass
     pass
-
+  
+    tez_jars = {}
     if app_dir_path:
-      for scr_file, dest_file in params.app_dir_files.iteritems():
-        CopyFromLocal(scr_file,
-                      mode=0755,
-                      owner=params.tez_user,
-                      dest_dir=app_dir_path,
-                      dest_file=dest_file,
-                      kinnit_if_needed=kinit_if_needed,
-                      hdfs_user=params.hdfs_user,
-                      hadoop_bin_dir=params.hadoop_bin_dir,
-                      hadoop_conf_dir=params.hadoop_conf_dir
+      tez_jars[params.tez_local_api_jars] = app_dir_path
+    if lib_dir_path:
+      tez_jars[params.tez_local_lib_jars] = lib_dir_path
+
+    for src_file_regex, dest_dir in tez_jars.iteritems():
+      for src_filepath in glob.glob(src_file_regex):
+        src_filename = os.path.basename(src_filepath)
+        params.HdfsResource(format("{dest_dir}/{src_filename}"),
+                            type="file",
+                            action="create_delayed",
+                            source=src_filepath,
+                            mode=0755,
+                            owner=params.tez_user
         )
 
-    if lib_dir_path:
-      CopyFromLocal(params.tez_local_lib_jars,
-                    mode=0755,
-                    owner=params.tez_user,
-                    dest_dir=lib_dir_path,
-                    kinnit_if_needed=kinit_if_needed,
-                    hdfs_user=params.hdfs_user,
-                    hadoop_bin_dir=params.hadoop_bin_dir,
-                    hadoop_conf_dir=params.hadoop_conf_dir
-      )
+    params.HdfsResource(None, action="execute")
     pass
 
 

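The net effect of this rewrite: one kinit plus one CopyFromLocal (and therefore one hadoop fs invocation) per jar set is replaced by a delayed HdfsResource file entry per glob match, all flushed by the single execute at the end; the explicit kinit is dropped because HdfsResource already receives the keytab and kinit path through the partial in params.py. The source-to-destination mapping it builds can be sketched as follows (the glob and HDFS paths are illustrative):

import glob
import os

def map_jars_to_hdfs(src_globs_to_dest_dirs):
  """Pair every local glob match with its target path under the HDFS dest dir."""
  uploads = []
  for src_glob, dest_dir in sorted(src_globs_to_dest_dirs.items()):
    for src_filepath in glob.glob(src_glob):
      src_filename = os.path.basename(src_filepath)
      uploads.append((src_filepath, dest_dir + '/' + src_filename))
  return uploads

print(map_jars_to_hdfs({'/usr/lib/tez/tez*.jar': '/apps/tez',
                        '/usr/lib/tez/lib/*.jar': '/apps/tez/lib'}))

Each (local, hdfs) pair then becomes one delayed HdfsResource of type "file", and HdfsResource(None, action="execute") uploads them all in one pass.
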
http://git-wip-us.apache.org/repos/asf/ambari/blob/df9e096f/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params.py
index 97af1a5..8e6eacb 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params.py
@@ -29,15 +29,19 @@ tmp_dir = Script.get_tmp_dir()
 
 # This is expected to be of the form #.#.#.#
 stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
-hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)
-stack_is_hdp21 = hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.1') >= 0 and compare_versions(hdp_stack_version, '2.2') < 0
+hdp_stack_version_major = format_hdp_stack_version(stack_version_unformatted)
+stack_is_hdp21 = hdp_stack_version_major != "" and compare_versions(hdp_stack_version_major, '2.1') >= 0 and compare_versions(hdp_stack_version_major, '2.2') < 0
 
+# this is not available during the INSTALL action because hdp-select is not available
+hdp_stack_version = version.get_hdp_build_version(hdp_stack_version_major)
 # New Cluster Stack Version that is defined during the RESTART of a Rolling Upgrade
 version = default("/commandParams/version", None)
 
+webhcat_apps_dir = "/apps/webhcat"
+
 # Hadoop params
 # TODO, this logic should initialize these parameters in a file inside the HDP 2.2 stack.
-if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >=0:
+if hdp_stack_version_major != "" and compare_versions(hdp_stack_version_major, '2.2') >= 0:
   # start out with client libraries
   hadoop_bin_dir = "/usr/hdp/current/hadoop-client/bin"
   hadoop_home = '/usr/hdp/current/hadoop-client'
@@ -60,6 +64,25 @@ if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >=0:
   webhcat_bin_dir = '/usr/hdp/current/hive-webhcat/sbin'
 
   hive_specific_configs_supported = True
+
+  # --- Tarballs ---
+
+  hive_tar_source = config['configurations']['cluster-env']['hive_tar_source']
+  pig_tar_source = config['configurations']['cluster-env']['pig_tar_source']
+  hadoop_streaming_tar_source = config['configurations']['cluster-env']['hadoop-streaming_tar_source']
+  sqoop_tar_source = config['configurations']['cluster-env']['sqoop_tar_source']
+  mapreduce_tar_source = config['configurations']['cluster-env']['mapreduce_tar_source']
+  tez_tar_source = config['configurations']['cluster-env']['tez_tar_source']
+
+  hive_tar_destination = config['configurations']['cluster-env']['hive_tar_destination_folder'] + "/" + os.path.basename(hive_tar_source)
+  pig_tar_destination = config['configurations']['cluster-env']['pig_tar_destination_folder'] + "/" + os.path.basename(pig_tar_source)
+  hadoop_streaming_tar_destination_dir = config['configurations']['cluster-env']['hadoop-streaming_tar_destination_folder']
+  sqoop_tar_destination = config['configurations']['cluster-env']['sqoop_tar_destination_folder'] + "/" + os.path.basename(sqoop_tar_source)
+  mapreduce_tar_destination = config['configurations']['cluster-env']['mapreduce_tar_destination_folder'] + "/" + os.path.basename(mapreduce_tar_source)
+  tez_tar_destination = config['configurations']['cluster-env']['tez_tar_destination_folder'] + "/" + os.path.basename(tez_tar_source)
+
+  tarballs_mode = 0444
+
 else:
   hadoop_bin_dir = "/usr/bin"
   hadoop_home = '/usr'
@@ -70,7 +93,7 @@ else:
   hive_tar_file = '/usr/share/HDP-webhcat/hive.tar.gz'
   sqoop_tar_file = '/usr/share/HDP-webhcat/sqoop*.tar.gz'
 
-  if hdp_stack_version != "" and compare_versions(hdp_stack_version, "2.1.0.0") < 0:
+  if hdp_stack_version_major != "" and compare_versions(hdp_stack_version_major, "2.1.0.0") < 0:
     hcat_lib = '/usr/lib/hcatalog/share/hcatalog'
     webhcat_bin_dir = '/usr/lib/hcatalog/sbin'
   # for newer versions
@@ -80,13 +103,27 @@ else:
     
   hive_specific_configs_supported = False
 
+  # --- Tarballs ---
+  hive_tar_source = hive_tar_file
+  pig_tar_source = pig_tar_file
+  hadoop_streaming_tar_source = hadoop_streeming_jars
+  sqoop_tar_source = sqoop_tar_file
+
+  hive_tar_destination = webhcat_apps_dir + "/" + os.path.basename(hive_tar_source)
+  pig_tar_destination = webhcat_apps_dir + "/" + os.path.basename(pig_tar_source)
+  hadoop_streaming_tar_destination_dir = webhcat_apps_dir
+  sqoop_tar_destination_dir = webhcat_apps_dir
+
+  tarballs_mode = 0755
+
+
 hadoop_conf_dir = "/etc/hadoop/conf"
 hive_conf_dir_prefix = "/etc/hive"
 hive_conf_dir = format("{hive_conf_dir_prefix}/conf")
 hive_client_conf_dir = format("{hive_conf_dir_prefix}/conf")
 hive_server_conf_dir = format("{hive_conf_dir_prefix}/conf.server")
 
-if hdp_stack_version != "" and compare_versions(hdp_stack_version, "2.1.0.0") < 0:
+if hdp_stack_version_major != "" and compare_versions(hdp_stack_version_major, "2.1.0.0") < 0:
   hcat_conf_dir = '/etc/hcatalog/conf'
   config_dir = '/etc/hcatalog/conf'
 # for newer versions
@@ -194,7 +231,7 @@ mysql_adduser_path = format("{tmp_dir}/addMysqlUser.sh")
 mysql_deluser_path = format("{tmp_dir}/removeMysqlUser.sh")
 
 ######## Metastore Schema
-if hdp_stack_version != "" and compare_versions(hdp_stack_version, "2.1.0.0") < 0:
+if hdp_stack_version_major != "" and compare_versions(hdp_stack_version_major, "2.1.0.0") < 0:
   init_metastore_schema = False
 else:
   init_metastore_schema = True
@@ -239,7 +276,6 @@ tez_user = config['configurations']['tez-env']['tez_user']
 # Tez jars
 tez_local_api_jars = '/usr/lib/tez/tez*.jar'
 tez_local_lib_jars = '/usr/lib/tez/lib/*.jar'
-app_dir_files = {tez_local_api_jars:None}
 
 # Tez libraries
 tez_lib_uris = default("/configurations/tez-site/tez.lib.uris", None)
@@ -283,7 +319,6 @@ templeton_jar = config['configurations']['webhcat-site']['templeton.jar']
 
 webhcat_server_host = config['clusterHostInfo']['webhcat_server_host']
 
-webhcat_apps_dir = "/apps/webhcat"
 
 hcat_hdfs_user_dir = format("/user/{hcat_user}")
 hcat_hdfs_user_mode = 0755
@@ -293,14 +328,15 @@ webhcat_hdfs_user_mode = 0755
 security_param = "true" if security_enabled else "false"
 
 import functools
-#create partial functions with common arguments for every HdfsDirectory call
-#to create hdfs directory we need to call params.HdfsDirectory in code
-HdfsDirectory = functools.partial(
-  HdfsDirectory,
-  conf_dir = hadoop_conf_dir,
-  hdfs_user = hdfs_principal_name if security_enabled else hdfs_user,
+# Create partial functions with common arguments for every HdfsResource call;
+# to create or manage an HDFS resource, call params.HdfsResource in code.
+HdfsResource = functools.partial(
+  HdfsResource,
+  user = hdfs_principal_name if security_enabled else hdfs_user,
   security_enabled = security_enabled,
   keytab = hdfs_user_keytab,
   kinit_path_local = kinit_path_local,
-  bin_dir = hadoop_bin_dir
+  hadoop_fs = fs_root,
+  hadoop_bin_dir = hadoop_bin_dir,
+  hadoop_conf_dir = hadoop_conf_dir
 )
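
This partial binds the cluster-wide arguments (user or principal, keytab, kinit path, fs root, hadoop bin and conf dirs) once, so every call site in the scripts above only passes the per-resource path, type, owner, mode and action. A self-contained illustration of the same functools.partial pattern, with placeholder values standing in for the real params:

import functools

def hdfs_resource(path, user=None, hadoop_fs=None, security_enabled=False,
                  type=None, action=None, owner=None, mode=None):
  # stand-in for the real HdfsResource provider
  print("%s %s %s (owner=%s) as %s on %s"
        % (action, type, path, owner, user, hadoop_fs))

HdfsResource = functools.partial(
    hdfs_resource,
    user='hdfs',                        # placeholder for hdfs_principal_name/hdfs_user
    security_enabled=False,
    hadoop_fs='hdfs://namenode:8020',   # placeholder for fs_root
)

HdfsResource('/apps/hive/warehouse', type='directory', owner='hive',
             action='create_delayed')
HdfsResource(None, action='execute')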