Posted to commits@ambari.apache.org by jo...@apache.org on 2015/05/11 21:32:46 UTC

[1/8] ambari git commit: Revert "AMBARI-11034. Implement HdfsResouces and it's usage (aonishuk)"

Repository: ambari
Updated Branches:
  refs/heads/trunk 661d143b9 -> e833066e7


Revert "AMBARI-11034. Implement HdfsResouces and it's usage (aonishuk)"

This reverts commit 0564f0c3ba2f82235925101a33c4316082c43e98.


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/20161e62
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/20161e62
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/20161e62

Branch: refs/heads/trunk
Commit: 20161e62769797b4780df1faccd20ab8b6f9e6da
Parents: d2ebd9c
Author: Jonathan Hurley <jh...@hortonworks.com>
Authored: Mon May 11 15:20:32 2015 -0400
Committer: Jonathan Hurley <jh...@hortonworks.com>
Committed: Mon May 11 15:30:02 2015 -0400

----------------------------------------------------------------------
 .../YARN/2.1.0.2.0/package/scripts/params_linux.py             | 6 ++++++
 1 file changed, 6 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/20161e62/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py
index 8b5db38..7437e37 100644
--- a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py
@@ -20,8 +20,11 @@ Ambari Agent
 """
 import os
 
+<<<<<<< HEAD
 from resource_management.libraries.functions import conf_select
+=======
 from resource_management import *
+>>>>>>> AMBARI-11034. Implement HdfsResouces and it's usage (aonishuk)
 from resource_management.libraries.functions import format
 from resource_management.libraries.functions import get_kinit_path
 from resource_management.libraries.functions.version import format_hdp_stack_version
@@ -98,12 +101,15 @@ if Script.is_hdp_stack_greater_or_equal("2.2"):
   mapreduce_tar_source = config['configurations']['cluster-env']['mapreduce_tar_source']
   mapreduce_tar_destination = config['configurations']['cluster-env']['mapreduce_tar_destination_folder'] + "/" + os.path.basename(mapreduce_tar_source)
 
+<<<<<<< HEAD
+=======
   # the configuration direction for HDFS/YARN/MapR is the hadoop config
   # directory, which is symlinked by hadoop-client only
   hadoop_conf_dir = "/usr/hdp/current/hadoop-client/conf"
   tez_tar_source = config['configurations']['cluster-env']['tez_tar_source']
   tez_tar_destination = config['configurations']['cluster-env']['tez_tar_destination_folder'] + "/" + os.path.basename(tez_tar_source)
 
+>>>>>>> AMBARI-11034. Implement HdfsResouces and it's usage (aonishuk)
 
 limits_conf_dir = "/etc/security/limits.d"
 execute_path = os.environ['PATH'] + os.pathsep + hadoop_bin_dir + os.pathsep + yarn_container_bin

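Note that the hunk above commits literal merge-conflict markers, which leaves params_linux.py unparseable (Python raises a SyntaxError on the "<<<<<<< HEAD" line). For reference, a cleanly merged import block, as a sketch assuming both sides of the conflict were meant to survive (which is what the surrounding context lines suggest), would read:

    import os

    from resource_management import *
    from resource_management.libraries.functions import conf_select
    from resource_management.libraries.functions import format
    from resource_management.libraries.functions import get_kinit_path
    from resource_management.libraries.functions.version import format_hdp_stack_version
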

[3/8] ambari git commit: Revert "AMBARI-11034. Implement HdfsResouces and it's usage (aonishuk)"

Posted by jo...@apache.org.
Revert "AMBARI-11034. Implement HdfsResouces and it's usage (aonishuk)"

This reverts commit 02cd3ae9a98f9a3adaeba8e4c50203ca6e042755.


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/714838d8
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/714838d8
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/714838d8

Branch: refs/heads/trunk
Commit: 714838d8a9984954377b37a2c55ba7d8a0199bb6
Parents: 661d143
Author: Jonathan Hurley <jh...@hortonworks.com>
Authored: Mon May 11 15:20:16 2015 -0400
Committer: Jonathan Hurley <jh...@hortonworks.com>
Committed: Mon May 11 15:30:02 2015 -0400

----------------------------------------------------------------------
 .../HIVE/0.12.0.2.0/package/scripts/params_linux.py                | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/714838d8/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
index 6f390c1..89f0224 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
@@ -112,7 +112,7 @@ if Script.is_hdp_stack_greater_or_equal("2.2"):
   hive_tar_destination = config['configurations']['cluster-env']['hive_tar_destination_folder']  + "/" + os.path.basename(hive_tar_source)
   pig_tar_destination = config['configurations']['cluster-env']['pig_tar_destination_folder'] + "/" + os.path.basename(pig_tar_source)
   hadoop_streaming_tar_destination_dir = config['configurations']['cluster-env']['hadoop-streaming_tar_destination_folder']
-  sqoop_tar_destination_dir = config['configurations']['cluster-env']['sqoop_tar_destination_folder'] + "/" + os.path.basename(sqoop_tar_source)
+  sqoop_tar_destination = config['configurations']['cluster-env']['sqoop_tar_destination_folder'] + "/" + os.path.basename(sqoop_tar_source)
   mapreduce_tar_destination = config['configurations']['cluster-env']['mapreduce_tar_destination_folder'] + "/" + os.path.basename(mapreduce_tar_source)
   tez_tar_destination = config['configurations']['cluster-env']['tez_tar_destination_folder'] + "/" + os.path.basename(tez_tar_source)
 

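The one-line change above undoes the reverted commit's rename, restoring sqoop_tar_destination. The value follows the same folder-plus-basename pattern used for every tarball destination in these scripts; a self-contained sketch (the config dict is a hypothetical stand-in for Ambari's Script.get_config(), with values copied from the test configs further below):

    import os

    config = {'configurations': {'cluster-env': {
        'sqoop_tar_source': '/usr/hdp/current/sqoop-client/sqoop.tar.gz',
        'sqoop_tar_destination_folder': 'hdfs:///hdp/apps/{{ hdp_stack_version }}/sqoop/',
    }}}

    cluster_env = config['configurations']['cluster-env']
    sqoop_tar_source = cluster_env['sqoop_tar_source']
    # destination = destination folder + "/" + tarball file name
    sqoop_tar_destination = (cluster_env['sqoop_tar_destination_folder'] + "/" +
                             os.path.basename(sqoop_tar_source))
    # yields hdfs:///hdp/apps/{{ hdp_stack_version }}/sqoop//sqoop.tar.gz
    # (the doubled slash comes from the trailing slash in the folder value)
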

[4/8] ambari git commit: Revert "AMBARI-11034. Implement HdfsResouces and it's usage (aonishuk)"

Posted by jo...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/test/python/stacks/2.2/configs/default.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.2/configs/default.json b/ambari-server/src/test/python/stacks/2.2/configs/default.json
index 8188928..c5a6ae7 100644
--- a/ambari-server/src/test/python/stacks/2.2/configs/default.json
+++ b/ambari-server/src/test/python/stacks/2.2/configs/default.json
@@ -185,18 +185,7 @@
             "ignore_groupsusers_create": "false",
             "smokeuser": "ambari-qa",
             "kerberos_domain": "EXAMPLE.COM",
-            "user_group": "hadoop",
-	        "hive_tar_source": "/usr/hdp/current/hive-client/hive.tar.gz", 
-	        "hadoop-streaming_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/", 
-	        "pig_tar_source": "/usr/hdp/current/pig-client/pig.tar.gz", 
-	        "hive_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/hive/", 
-	        "tez_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/tez/", 
-	        "mapreduce_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/", 
-	        "tez_tar_source": "/usr/hdp/current/tez-client/lib/tez.tar.gz", 
-	        "pig_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/pig/", 
-	        "mapreduce_tar_source": "/usr/hdp/current/hadoop-client/mapreduce.tar.gz", 
-	        "sqoop_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/sqoop/", 
-	        "sqoop_tar_source": "/usr/hdp/current/sqoop-client/sqoop.tar.gz"
+            "user_group": "hadoop"
         },
         "ranger-knox-plugin-properties": {
             "POLICY_MGR_URL": "{{policymgr_mgr_url}}", 

http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/test/python/stacks/2.2/configs/secured.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.2/configs/secured.json b/ambari-server/src/test/python/stacks/2.2/configs/secured.json
index e224ebc..5bd8814 100644
--- a/ambari-server/src/test/python/stacks/2.2/configs/secured.json
+++ b/ambari-server/src/test/python/stacks/2.2/configs/secured.json
@@ -173,18 +173,7 @@
             "user_group": "hadoop",
             "smokeuser_keytab": "/etc/security/keytabs/smokeuser.headless.keytab",
             "smokeuser_principal_name": "ambari-qa@EXAMPLE.COM",
-            "kinit_path_local": "/usr/bin",
-	        "hive_tar_source": "/usr/hdp/current/hive-client/hive.tar.gz", 
-	        "hadoop-streaming_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/", 
-	        "pig_tar_source": "/usr/hdp/current/pig-client/pig.tar.gz", 
-	        "hive_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/hive/", 
-	        "tez_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/tez/", 
-	        "mapreduce_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/", 
-	        "tez_tar_source": "/usr/hdp/current/tez-client/lib/tez.tar.gz", 
-	        "pig_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/pig/", 
-	        "mapreduce_tar_source": "/usr/hdp/current/hadoop-client/mapreduce.tar.gz", 
-	        "sqoop_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/sqoop/", 
-	        "sqoop_tar_source": "/usr/hdp/current/sqoop-client/sqoop.tar.gz"
+            "kinit_path_local": "/usr/bin"
         },
         "webhcat-site": {
             "templeton.jar": "/usr/hdp/current/hive-webhcat/share/webhcat/svr/lib/hive-webhcat-*.jar",

http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/test/python/stacks/2.3/MAHOUT/test_mahout_service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.3/MAHOUT/test_mahout_service_check.py b/ambari-server/src/test/python/stacks/2.3/MAHOUT/test_mahout_service_check.py
index 665119f..0d943c4 100644
--- a/ambari-server/src/test/python/stacks/2.3/MAHOUT/test_mahout_service_check.py
+++ b/ambari-server/src/test/python/stacks/2.3/MAHOUT/test_mahout_service_check.py
@@ -33,42 +33,35 @@ class TestMahoutClient(RMFTestCase):
                        target = RMFTestCase.TARGET_COMMON_SERVICES
     )
 
+    self.assertResourceCalled('ExecuteHadoop', 'fs -rm -r -f /user/ambari-qa/mahoutsmokeoutput /user/ambari-qa/mahoutsmokeinput',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/usr/hdp/current/hadoop-client/conf',
+                              try_sleep = 5,
+                              kinit_path_local = '/usr/bin/kinit',
+                              tries = 3,
+                              user = 'ambari-qa',
+                              bin_dir = '/usr/hdp/current/hadoop-client/bin',
+                              principal = UnknownConfigurationMock(),
+                              )
+    self.assertResourceCalled('ExecuteHadoop', 'fs -mkdir /user/ambari-qa/mahoutsmokeinput',
+                              try_sleep = 5,
+                              tries = 3,
+                              bin_dir = '/usr/hdp/current/hadoop-client/bin',
+                              user = 'ambari-qa',
+                              conf_dir = '/usr/hdp/current/hadoop-client/conf',
+                              )
     self.assertResourceCalled('File', '/tmp/sample-mahout-test.txt',
-        content = 'Test text which will be converted to sequence file.',
-        mode = 0755,
-    )
-    self.assertResourceCalled('HdfsResource', '/user/ambari-qa/mahoutsmokeinput',
-        security_enabled = False,
-        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
-        keytab = UnknownConfigurationMock(),
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        owner = 'ambari-qa',
-        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
-        type = 'directory',
-        action = ['create_on_execute'],
-    )
-    self.assertResourceCalled('HdfsResource', '/user/ambari-qa/mahoutsmokeinput/sample-mahout-test.txt',
-        security_enabled = False,
-        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
-        keytab = UnknownConfigurationMock(),
-        kinit_path_local = '/usr/bin/kinit',
-        source = '/tmp/sample-mahout-test.txt',
-        user = 'hdfs',
-        owner = 'ambari-qa',
-        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
-        type = 'file',
-        action = ['create_on_execute'],
-    )
-    self.assertResourceCalled('HdfsResource', None,
-        security_enabled = False,
-        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
-        keytab = UnknownConfigurationMock(),
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        action = ['execute'],
-        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
-    )
+                              content = 'Test text which will be converted to sequence file.',
+                              mode = 0755,
+                              )
+    self.assertResourceCalled('ExecuteHadoop', 'fs -put /tmp/sample-mahout-test.txt /user/ambari-qa/mahoutsmokeinput/',
+                              try_sleep = 5,
+                              tries = 3,
+                              bin_dir = '/usr/hdp/current/hadoop-client/bin',
+                              user = 'ambari-qa',
+                              conf_dir = '/usr/hdp/current/hadoop-client/conf',
+                              )
     self.assertResourceCalled('Execute', 'mahout seqdirectory --input /user/ambari-qa/mahoutsmokeinput/'
                                          'sample-mahout-test.txt --output /user/ambari-qa/mahoutsmokeoutput/ '
                                          '--charset utf-8',

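The updated expectations drive HDFS through ExecuteHadoop shell commands instead of HdfsResource declarations: each assertion corresponds to one hadoop fs invocation run as ambari-qa against the hadoop-client conf and bin dirs. A sketch of the producing side of the first two assertions (a hypothetical reconstruction of the service check; the real first call also carries the security arguments shown above):

    from resource_management import ExecuteHadoop  # import path is an assumption

    # Clean up any previous run; retries cover a slow-to-start HDFS.
    ExecuteHadoop('fs -rm -r -f /user/ambari-qa/mahoutsmokeoutput /user/ambari-qa/mahoutsmokeinput',
                  tries=3, try_sleep=5,
                  user='ambari-qa',
                  conf_dir='/usr/hdp/current/hadoop-client/conf',
                  bin_dir='/usr/hdp/current/hadoop-client/bin')

    # Recreate the smoke-test input directory.
    ExecuteHadoop('fs -mkdir /user/ambari-qa/mahoutsmokeinput',
                  tries=3, try_sleep=5,
                  user='ambari-qa',
                  conf_dir='/usr/hdp/current/hadoop-client/conf',
                  bin_dir='/usr/hdp/current/hadoop-client/bin')
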
http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/contrib/fast-hdfs-resource/dependency-reduced-pom.xml
----------------------------------------------------------------------
diff --git a/contrib/fast-hdfs-resource/dependency-reduced-pom.xml b/contrib/fast-hdfs-resource/dependency-reduced-pom.xml
deleted file mode 100644
index c252f2e..0000000
--- a/contrib/fast-hdfs-resource/dependency-reduced-pom.xml
+++ /dev/null
@@ -1,42 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-  <modelVersion>4.0.0</modelVersion>
-  <groupId>org.apache.ambari</groupId>
-  <artifactId>fast-hdfs-resource</artifactId>
-  <name>fast-hdfs-resource</name>
-  <version>0.0.1-SNAPSHOT</version>
-  <url>http://maven.apache.org</url>
-  <build>
-    <plugins>
-      <plugin>
-        <artifactId>maven-shade-plugin</artifactId>
-        <version>2.3</version>
-        <executions>
-          <execution>
-            <phase>package</phase>
-            <goals>
-              <goal>shade</goal>
-            </goals>
-            <configuration>
-              <transformers>
-                <transformer>
-                  <mainClass>org.apache.ambari.fast_hdfs_resource.Runner</mainClass>
-                </transformer>
-              </transformers>
-            </configuration>
-          </execution>
-        </executions>
-      </plugin>
-    </plugins>
-  </build>
-  <repositories>
-    <repository>
-      <id>hdp.internal</id>
-      <url>http://repo1.maven.org/maven2</url>
-    </repository>
-  </repositories>
-  <properties>
-    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
-  </properties>
-</project>
-

http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/contrib/fast-hdfs-resource/pom.xml
----------------------------------------------------------------------
diff --git a/contrib/fast-hdfs-resource/pom.xml b/contrib/fast-hdfs-resource/pom.xml
deleted file mode 100644
index 6a92841..0000000
--- a/contrib/fast-hdfs-resource/pom.xml
+++ /dev/null
@@ -1,86 +0,0 @@
-<!--
-   Licensed to the Apache Software Foundation (ASF) under one or more
-   contributor license agreements.  See the NOTICE file distributed with
-   this work for additional information regarding copyright ownership.
-   The ASF licenses this file to You under the Apache License, Version 2.0
-   (the "License"); you may not use this file except in compliance with
-   the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
--->
-<project xmlns="http://maven.apache.org/POM/4.0.0"
-         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-  <modelVersion>4.0.0</modelVersion>
-
-  <groupId>org.apache.ambari</groupId>
-  <artifactId>fast-hdfs-resource</artifactId>
-  <version>0.0.1-SNAPSHOT</version>
-  <packaging>jar</packaging>
-
-  <name>fast-hdfs-resource</name>
-  <url>http://maven.apache.org</url>
-  <repositories>
-    <repository>
-      <id>hdp.internal</id>
-      <url>http://repo1.maven.org/maven2</url>
-    </repository>
-  </repositories>
-  <properties>
-    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
-  </properties>
-
-  <dependencies>
-    <dependency>
-      <groupId>org.apache.hadoop</groupId>
-      <artifactId>hadoop-tools</artifactId>
-      <version>1.2.1</version>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hadoop</groupId>
-      <artifactId>hadoop-core</artifactId>
-      <version>1.2.1</version>
-    </dependency>
-    <dependency>
-      <groupId>com.google.code.gson</groupId>
-      <artifactId>gson</artifactId>
-      <version>2.2.2</version>
-    </dependency>
-  </dependencies>
-
-
-  <!-- Create executable jar with the application entry point -->
-  <build>
-    <plugins>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-shade-plugin</artifactId>
-        <version>2.3</version>
-        <executions>
-          <execution>
-            <phase>package</phase>
-            <goals>
-              <goal>shade</goal>
-            </goals>
-            <configuration>
-              <transformers>
-                <transformer
-                  implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer">
-                  <mainClass>org.apache.ambari.fast_hdfs_resource.Runner
-                  </mainClass>
-                </transformer>
-              </transformers>
-            </configuration>
-          </execution>
-        </executions>
-      </plugin>
-    </plugins>
-  </build>
-
-</project>

http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/contrib/fast-hdfs-resource/resources/example.json
----------------------------------------------------------------------
diff --git a/contrib/fast-hdfs-resource/resources/example.json b/contrib/fast-hdfs-resource/resources/example.json
deleted file mode 100644
index 605deb9..0000000
--- a/contrib/fast-hdfs-resource/resources/example.json
+++ /dev/null
@@ -1,57 +0,0 @@
-[
-{
-	"target":"/tmp/some999",
-	"type":"directory",
-	"action":"delete"
-},
-{
-	"target":"/tmp/some999/more/dirs/for/recursive/tests",
-	"type":"directory",
-	"action":"create"
-},
-{
-	"target":"/tmp/some999/more/dirs/for/recursive/tests/file_empty.txt",
-	"type":"file",
-	"action":"create"
-},
-{
-	"target":"/tmp/some999",
-	"type":"directory",
-	"action":"create",
-	"owner":"oozie"
-},
-{
-	"target":"/tmp/some999",
-	"type":"directory",
-	"action":"create",
-	"group":"hive"
-},
-{
-	"target":"/tmp/some999",
-	"type":"directory",
-	"action":"create",
-	"mode":"777"
-},
-{
-	"target":"/tmp/some999/more/dirs",
-	"type":"directory",
-	"action":"create",
-	"owner":"yarn",
-	"group":"mapred",
-	"recursiveChown":true,
-	"mode":"757",
-	"recursiveChmod":true
-},
-{
-	"source":"/tmp/my.txt",
-	"target":"/tmp/some999/my_file.txt",
-	"type":"file",
-	"action":"create"
-},
-{
-	"source":"/tmp/a",
-	"target":"/tmp/some999/a_dir",
-	"type":"directory",
-	"action":"create"
-}
-]
\ No newline at end of file

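The deleted example.json doubled as the schema reference for the contrib tool: each entry names a target path, a type (file or directory) and an action (create or delete), plus optional source, owner, group, mode and the recursiveChown/recursiveChmod flags. Loading and inspecting it, as it existed before this revert, takes a few lines (sketch):

    import json

    with open('contrib/fast-hdfs-resource/resources/example.json') as f:
        resources = json.load(f)

    for r in resources:
        # Only 'target', 'type' and 'action' are mandatory; the rest are optional.
        print('%s %s %s owner=%s mode=%s' % (
            r['action'], r['type'], r['target'], r.get('owner'), r.get('mode')))
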
http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/contrib/fast-hdfs-resource/resources/test_perfomance.sh
----------------------------------------------------------------------
diff --git a/contrib/fast-hdfs-resource/resources/test_perfomance.sh b/contrib/fast-hdfs-resource/resources/test_perfomance.sh
deleted file mode 100644
index 40339e3..0000000
--- a/contrib/fast-hdfs-resource/resources/test_perfomance.sh
+++ /dev/null
@@ -1,46 +0,0 @@
-#!/bin/sh
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-
-# Delete 2 dirs
-sudo -u hdfs hadoop fs -rm -r /tmp/some999
-sudo -u hdfs hadoop fs -rm -r /tmp/some888
-# Create
-sudo -u hdfs hadoop fs -mkdir -p /tmp/some999/more/dirs/for/recursive/tests
-# Create + permissions + owner
-sudo -u hdfs hadoop fs -mkdir -p /tmp/some888/more/dirs/for/recursive/tests
-sudo -u hdfs hadoop fs -chown hadoop:hadoop /tmp/some888/more/dirs/for/recursive/tests
-sudo -u hdfs hadoop fs -chmod 777 /tmp/some888/more/dirs/for/recursive/tests
-# Empty dirs with permissions/owners to last dir"
-sudo -u hdfs hadoop fs -mkdir -p /tmp/some888/and_more/and_dirs/_andfor/recursive/tests
-sudo -u hdfs hadoop fs -chmod 777 /tmp/some888/and_more/and_dirs/_andfor/recursive/tests
-sudo -u hdfs hadoop fs -chown hadoop:hadoop /tmp/some888/and_more/and_dirs/_andfor/recursive/tests
-# Empty dirs with permissions/owners to last file
-sudo -u hdfs hadoop fs -touchz /tmp/some888/file.txt
-sudo -u hdfs hadoop fs -chown hadoop:hadoop /tmp/some888/file.txt
-sudo -u hdfs hadoop fs -chmod 777 /tmp/some888/file.txt
-# Empty dirs with permissions/owners to last file
-sudo -u hdfs hadoop fs -touchz /tmp/some888/and_more/and_dirs/file2.txt
-sudo -u hdfs hadoop fs -chown hadoop:hadoop /tmp/some888/and_more/and_dirs/file2.txt
-sudo -u hdfs hadoop fs -chmod 777 /tmp/some888/and_more/and_dirs/file2.txt
-# Recursive permissions
-sudo -u hdfs hadoop fs -chmod -R 700 /tmp/some888
-sudo -u hdfs hadoop fs -chown -R hive:hive /tmp/some999
-
-

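test_perfomance.sh exercised the baseline that fast-hdfs-resource was written to beat: each of its 17 mkdir/touchz/chown/chmod operations is a separate sudo -u hdfs hadoop fs invocation, so every operation pays a full JVM start, while the Runner below handles an entire JSON batch inside one process. A back-of-the-envelope comparison (all numbers are illustrative assumptions, not measurements from this script):

    jvm_startup_s = 2.0                              # assumed per-invocation 'hadoop fs' overhead
    operations = 17                                  # fs calls in the script above
    per_command_total = operations * jvm_startup_s   # startup cost paid on every call
    batched_total = jvm_startup_s                    # one JVM for the whole JSON batch
    print('%.0fs vs %.0fs of startup overhead' % (per_command_total, batched_total))  # 34s vs 2s
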
http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/contrib/fast-hdfs-resource/src/main/java/org/apache/ambari/fast_hdfs_resource/Resource.java
----------------------------------------------------------------------
diff --git a/contrib/fast-hdfs-resource/src/main/java/org/apache/ambari/fast_hdfs_resource/Resource.java b/contrib/fast-hdfs-resource/src/main/java/org/apache/ambari/fast_hdfs_resource/Resource.java
deleted file mode 100644
index 21750e1..0000000
--- a/contrib/fast-hdfs-resource/src/main/java/org/apache/ambari/fast_hdfs_resource/Resource.java
+++ /dev/null
@@ -1,295 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.fast_hdfs_resource;
-
-import java.io.FileNotFoundException;
-import java.io.IOException;
-import java.io.File;
-import java.lang.System;
-import java.util.ArrayList;
-import java.util.HashSet;
-
-import org.apache.hadoop.fs.FileStatus;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.permission.FsPermission;
-import org.apache.hadoop.fs.FileSystem;
-
-/**
- * Used to: 1) copy files/directories from localFS to hadoopFs 2) create empty
- * files/directories in hadoopFs
- */
-public class Resource {
-  private String source;
-  private String target;
-  private String type;
-  private String action;
-  private String owner;
-  private String group;
-  private String mode;
-  private boolean recursiveChown;
-  private boolean recursiveChmod;
-  private boolean changePermissionforParents;
-
-  public String getSource() {
-    return source;
-  }
-
-  public void setSource(String source) {
-    this.source = source;
-  }
-
-  public String getTarget() {
-    return target;
-  }
-
-  public void setTarget(String target) {
-    this.target = target;
-  }
-
-  public String getType() {
-    return type;
-  }
-
-  public void setType(String type) {
-    this.type = type;
-  }
-
-  public String getAction() {
-    return action;
-  }
-
-  public void setAction(String action) {
-    this.action = action;
-  }
-
-  public String getOwner() {
-    return owner;
-  }
-
-  public void setOwner(String owner) {
-    this.owner = owner;
-  }
-
-  public String getGroup() {
-    return group;
-  }
-
-  public void setGroup(String group) {
-    this.group = group;
-  }
-
-  public String getMode() {
-    return mode;
-  }
-
-  public void setMode(String mode) {
-    this.mode = mode;
-  }
-
-  public boolean isRecursiveChown() {
-    return recursiveChown;
-  }
-
-  public void setRecursiveChown(boolean recursiveChown) {
-    this.recursiveChown = recursiveChown;
-  }
-
-  public boolean isRecursiveChmod() {
-    return recursiveChmod;
-  }
-
-  public void setRecursiveChmod(boolean recursiveChmod) {
-    this.recursiveChmod = recursiveChmod;
-  }
-  
-  public boolean isChangePermissionOnParents() {
-    return changePermissionforParents;
-  }
-
-  public void setChangePermissionOnParents(boolean changePermissionforParents) {
-    this.changePermissionforParents = changePermissionforParents;
-  }
-
-  
-  
-  
-  @Override
-  public String toString() {
-    return "Resource [source=" + source + ", target=" + target + ", type="
-        + type + ", action=" + action + ", owner=" + owner + ", group=" + group
-        + ", mode=" + mode + ", recursiveChown=" + recursiveChown
-        + ", recursiveChmod=" + recursiveChmod
-        + ", changePermissionforParents=" + changePermissionforParents + "]";
-  }
-
-  /*
-   * Check if parameters are correctly set
-   */
-  public static void checkResourceParameters(Resource resource,
-      FileSystem dfs) throws IllegalArgumentException, IOException {
-
-    ArrayList<String> actionsAvailable = new ArrayList<String>();
-    actionsAvailable.add("create");
-    actionsAvailable.add("delete");
-    ArrayList<String> typesAvailable = new ArrayList<String>();
-    typesAvailable.add("file");
-    typesAvailable.add("directory");
-
-    if (resource.getTarget() == null)
-      throw new IllegalArgumentException(
-          "Path to resource in HadoopFs must be filled.");
-
-    if (resource.getAction() == null
-        || !actionsAvailable.contains(resource.getAction()))
-      throw new IllegalArgumentException("Action is not supported.");
-
-    if (resource.getType() == null
-        || !typesAvailable.contains(resource.getType()))
-      throw new IllegalArgumentException("Type is not supported.");
-
-    // Check consistency for ("type":"file" == file in hadoop)
-    if (dfs.isFile(new Path(resource.getTarget()))
-        && !"file".equals(resource.getType()))
-      throw new IllegalArgumentException(
-          "Cannot create a directory " + resource.getTarget() +
-              " because file is present on the given path.");
-    // Check consistency for ("type":"directory" == directory in hadoop)
-    else if (dfs.isDirectory(new Path(resource.getTarget()))
-        && !"directory".equals(resource.getType()))
-      throw new IllegalArgumentException(
-          "Cannot create a file " + resource.getTarget() +
-              " because directory is present on the given path.");
-    
-    if(resource.getSource() != null) {
-      File source = new File(resource.getSource());
-      if(source.isFile()
-          && !"file".equals(resource.getType()))
-        throw new IllegalArgumentException(
-            "Cannot create a directory " + resource.getTarget() +
-                " because source " + resource.getSource() + "is a file");
-      else if(source.isDirectory()
-          && !"directory".equals(resource.getType()))
-        throw new IllegalArgumentException(
-            "Cannot create a file " + resource.getTarget() +
-                " because source " + resource.getSource() + "is a directory");      
-    }
-  }
-
-  /*
-   * Create/copy resource - {type}
-   */
-  public static void createResource(Resource resource,
-      FileSystem dfs, Path pathHadoop) throws IOException {
-
-    boolean isCreate = (resource.getSource() == null) ? true : false;
-
-    if (isCreate && resource.getType().equals("directory")) {
-      dfs.mkdirs(pathHadoop); // empty dir(s)
-    } else if (isCreate && resource.getType().equals("file")) {
-      dfs.createNewFile(pathHadoop); // empty file
-    } else {
-      dfs.copyFromLocalFile(new Path(resource.getSource()), pathHadoop);// copy
-    }
-  }
-
-  /*
-   * Set permissions on resource - {mode}
-   */
-  public static void setMode(Resource resource,
-      FileSystem dfs, Path pathHadoop) throws IOException {
-
-    if (resource.getMode() != null) {
-      FsPermission permission = new FsPermission(resource.getMode());
-      dfs.setPermission(pathHadoop, permission);
-
-      // Recursive
-      
-        // Get the list of sub-directories and files
-        HashSet<String> resultSet = new HashSet<String>();
-        
-        if (resource.isRecursiveChmod())
-          resource.fillDirectoryList(dfs, resource.getTarget(), resultSet);
-        
-        if(resource.isChangePermissionOnParents())
-          resource.fillInParentDirectories(dfs, resource.getTarget(), resultSet);
-
-        for (String path : resultSet) {
-          dfs.setPermission(new Path(path), permission);
-        }
-
-    }
-  }
-
-  /*
-   * Set owner on resource - {owner}
-   */
-  public static void setOwner(Resource resource, FileSystem dfs,
-      Path pathHadoop) throws IOException {
-
-    if (!(resource.getOwner() == null && resource.getGroup() == null)) {
-      dfs.setOwner(pathHadoop, resource.getOwner(), resource.getGroup());
-
-      // Get the list of sub-directories and files
-      HashSet<String> resultSet = new HashSet<String>();
-      if (resource.isRecursiveChown())
-        resource.fillDirectoryList(dfs, resource.getTarget(), resultSet);
-      if(resource.isChangePermissionOnParents())
-        resource.fillInParentDirectories(dfs, resource.getTarget(), resultSet);
-
-      for (String path : resultSet) {
-        dfs.setOwner(new Path(path), resource.getOwner(), resource.getGroup());
-      }
-    }
-  }
-  
-  public void fillInParentDirectories(FileSystem dfs, String path, HashSet<String> resultSet) throws IOException {
-    Path filePath = new Path(path);
-      
-    while(true) {
-      filePath = filePath.getParent();
-      
-      // if(filePath.isRoot()) {
-      if(filePath.getParent() == null) {
-        break;
-      }
-      resultSet.add(filePath.toString());
-    }
-  }
-
-  /*
-   * List all files and sub-directories recursively
-   */
-  public void fillDirectoryList(FileSystem dfs, String path,
-      HashSet<String> resultSet) throws IOException {
-
-    FileStatus[] fileStatus = dfs.listStatus(new Path(path));
-    if (fileStatus != null) {
-      // Go through all resources in directory
-      for (FileStatus fs : fileStatus) {
-        String pathToResource = path + "/" + fs.getPath().getName();
-
-        resultSet.add(pathToResource);
-
-        if (fs.isDir()) {
-          // recursive
-          fillDirectoryList(dfs, pathToResource, resultSet);
-        }
-      }
-    }
-  }
-}

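checkResourceParameters above enforces three invariants before any HDFS work: target, action and type must be present and supported; the declared type must agree with whatever already exists at the target; and, when a local source is given, it must agree with the source's own type. The same rules condensed into Python (hypothetical helper; exists_as/source_is replace the FileSystem and java.io.File probes):

    def check_resource(resource, exists_as=None, source_is=None):
        # exists_as / source_is: None, 'file' or 'directory'.
        if not resource.get('target'):
            raise ValueError('Path to resource in HadoopFs must be filled.')
        if resource.get('action') not in ('create', 'delete'):
            raise ValueError('Action is not supported.')
        if resource.get('type') not in ('file', 'directory'):
            raise ValueError('Type is not supported.')
        # "type":"file" must match a file already at the target, and vice versa.
        if exists_as is not None and exists_as != resource['type']:
            raise ValueError('Declared type conflicts with existing %s at target.' % exists_as)
        # A local source, when given, must be the same kind of object as the type.
        if source_is is not None and source_is != resource['type']:
            raise ValueError('Declared type conflicts with source %s.' % source_is)
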
http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/contrib/fast-hdfs-resource/src/main/java/org/apache/ambari/fast_hdfs_resource/Runner.java
----------------------------------------------------------------------
diff --git a/contrib/fast-hdfs-resource/src/main/java/org/apache/ambari/fast_hdfs_resource/Runner.java b/contrib/fast-hdfs-resource/src/main/java/org/apache/ambari/fast_hdfs_resource/Runner.java
deleted file mode 100644
index 291a2d9..0000000
--- a/contrib/fast-hdfs-resource/src/main/java/org/apache/ambari/fast_hdfs_resource/Runner.java
+++ /dev/null
@@ -1,93 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.fast_hdfs_resource;
-
-import java.io.File;
-import java.io.FileReader;
-import java.io.IOException;
-import java.net.URI;
-import java.net.URISyntaxException;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.FileSystem;
-
-import com.google.gson.Gson;
-
-public class Runner {
-  public static void main(String[] args)
-      throws IOException, URISyntaxException {
-    // 1 - Check arguments
-    if (args.length != 1) {
-      System.err.println("Incorrect number of arguments. Please provide:\n"
-          + "1) Path to json file\n"
-          + "Exiting...");
-      System.exit(1);
-    }
-
-    // 2 - Check if json-file exists
-    final String jsonFilePath = args[0];
-    File file = new File(jsonFilePath);
-
-    if (!file.isFile()) {
-      System.err
-          .println("File " + jsonFilePath + " doesn't exist.\nExiting...");
-      System.exit(1);
-    }
-
-    Gson gson = new Gson();
-    Resource[] resources = null;
-    FileSystem dfs = null;
-
-    try {
-      Configuration conf = new Configuration();
-      dfs = FileSystem.get(conf);
-
-      // 3 - Load data from JSON
-      resources = (Resource[]) gson.fromJson(new FileReader(jsonFilePath),
-          Resource[].class);
-
-      // 4 - Connect to HDFS
-      System.out.println("Using filesystem uri: " + FileSystem.getDefaultUri(conf).toString());
-      dfs.initialize(FileSystem.getDefaultUri(conf), conf);
-      
-      for (Resource resource : resources) {
-        System.out.println("Creating: " + resource);
-
-        Resource.checkResourceParameters(resource, dfs);
-
-        Path pathHadoop = new Path(resource.getTarget());
-        if (resource.getAction().equals("create")) {
-          // 5 - Create
-          Resource.createResource(resource, dfs, pathHadoop);
-          Resource.setMode(resource, dfs, pathHadoop);
-          Resource.setOwner(resource, dfs, pathHadoop);
-        } else if (resource.getAction().equals("delete")) {
-          // 6 - Delete
-          dfs.delete(pathHadoop, true);
-        }
-      }
-
-    } finally {
-      dfs.close();
-    }
-
-    System.out.println("All resources created.");
-  }
-
-}

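Runner.java is the whole command-line surface of the deleted tool: validate argv, parse one JSON file, connect to the default filesystem, then create or delete each resource in order. The same control flow condensed to Python (illustrative sketch only; 'dfs' stands in for the Hadoop FileSystem handle and may be any object with the methods used below):

    import json

    def run(json_path, dfs):
        with open(json_path) as f:
            resources = json.load(f)
        for r in resources:
            if r['action'] == 'delete':
                dfs.delete(r['target'], True)           # recursive delete
            elif r['action'] == 'create':
                if r.get('source'):
                    dfs.copy(r['source'], r['target'])  # copy from local FS
                elif r['type'] == 'directory':
                    dfs.mkdirs(r['target'])             # empty dir(s)
                else:
                    dfs.create(r['target'])             # empty file
                if r.get('mode'):
                    dfs.chmod(r['target'], r['mode'])   # setMode, as in Resource.java
                if r.get('owner') or r.get('group'):
                    dfs.chown(r['target'], r.get('owner'), r.get('group'))
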

[7/8] ambari git commit: Revert "AMBARI-11034. Implement HdfsResouces and it's usage (aonishuk)"

Posted by jo...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py
index 6edca7d..bfd4e74 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py
@@ -22,7 +22,6 @@ from resource_management import *
 from resource_management.libraries import functions
 import sys
 import os
-import glob
 from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl
 from ambari_commons import OSConst
 from urlparse import urlparse
@@ -82,96 +81,18 @@ def hive(name=None):
 
   if name == 'hiveserver2':
 
-    if params.hdp_stack_version_major != "" and compare_versions(params.hdp_stack_version_major, '2.2') >=0:
-      params.HdfsResource(InlineTemplate(params.mapreduce_tar_destination).get_content(),
-                          type="file",
-                          action="create_on_execute",
-                          source=params.mapreduce_tar_source,
-                          group=params.user_group,
-                          mode=params.tarballs_mode
-      )
-        
-    if params.hdp_stack_version_major != "" and compare_versions(params.hdp_stack_version_major, "2.2.0.0") < 0:
-      params.HdfsResource(params.webhcat_apps_dir,
-                           type="directory",
-                           action="create_on_execute",
-                           owner=params.webhcat_user,
-                           mode=0755
-      )
-  
-    if params.hcat_hdfs_user_dir != params.webhcat_hdfs_user_dir:
-      params.HdfsResource(params.hcat_hdfs_user_dir,
-                           type="directory",
-                           action="create_on_execute",
-                           owner=params.hcat_user,
-                           mode=params.hcat_hdfs_user_mode
-      )
-    params.HdfsResource(params.webhcat_hdfs_user_dir,
-                         type="directory",
-                         action="create_on_execute",
-                         owner=params.webhcat_user,
-                         mode=params.webhcat_hdfs_user_mode
-    )
-  
-    for src_filepath in glob.glob(params.hadoop_streaming_tar_source):
-      src_filename = os.path.basename(src_filepath)
-      params.HdfsResource(InlineTemplate(params.hadoop_streaming_tar_destination_dir).get_content() + '/' + src_filename,
-                          type="file",
-                          action="create_on_execute",
-                          source=src_filepath,
-                          group=params.user_group,
-                          mode=params.tarballs_mode
-      )
-  
-    if (os.path.isfile(params.pig_tar_source)):
-      params.HdfsResource(InlineTemplate(params.pig_tar_destination).get_content(),
-                          type="file",
-                          action="create_on_execute",
-                          source=params.pig_tar_source,
-                          group=params.user_group,
-                          mode=params.tarballs_mode
-      )
-  
-    params.HdfsResource(InlineTemplate(params.hive_tar_destination).get_content(),
-                        type="file",
-                        action="create_on_execute",
-                        source=params.hive_tar_source,
-                        group=params.user_group,
-                        mode=params.tarballs_mode
-    )
- 
-    for src_filepath in glob.glob(params.sqoop_tar_source):
-      src_filename = os.path.basename(src_filepath)
-      params.HdfsResource(InlineTemplate(params.sqoop_tar_destination_dir).get_content() + '/' + src_filename,
-                          type="file",
-                          action="create_on_execute",
-                          source=src_filepath,
-                          group=params.user_group,
-                          mode=params.tarballs_mode
-      )
-      
-    params.HdfsResource(params.hive_apps_whs_dir,
-                         type="directory",
-                          action="create_on_execute",
-                          owner=params.hive_user,
-                          mode=0777
+    params.HdfsDirectory(params.hive_apps_whs_dir,
+                         action="create_delayed",
+                         owner=params.hive_user,
+                         mode=0777
     )
-    params.HdfsResource(params.hive_hdfs_user_dir,
-                         type="directory",
-                          action="create_on_execute",
-                          owner=params.hive_user,
-                          mode=params.hive_hdfs_user_mode
+    params.HdfsDirectory(params.hive_hdfs_user_dir,
+                         action="create_delayed",
+                         owner=params.hive_user,
+                         mode=params.hive_hdfs_user_mode
     )
-    
-    if not is_empty(params.hive_exec_scratchdir) and not urlparse(params.hive_exec_scratchdir).path.startswith("/tmp"):
-      params.HdfsResource(params.hive_exec_scratchdir,
-                           type="directory",
-                           action="create_on_execute",
-                           owner=params.hive_user,
-                           group=params.hdfs_user,
-                           mode=0777) # Hive expects this dir to be writeable by everyone as it is used as a temp dir
-      
-    params.HdfsResource(None, action="execute")
+    setup_custom_scratchdir()
+    params.HdfsDirectory(None, action="create")
 
   Directory(params.hive_etc_dir_prefix,
             mode=0755
@@ -363,3 +284,20 @@ def jdbc_connector():
   File(params.target,
        mode = 0644,
   )
+
+# In case Hive has a custom path for its HDFS temporary directory,
+# recursive directory creation will be a prerequisite as 'hive' user cannot write on the root of the HDFS
+def setup_custom_scratchdir():
+  import params
+  # If this property is custom and not a variation of the writable temp dir
+  if is_empty(params.hive_exec_scratchdir):
+    return
+  parsed = urlparse(params.hive_exec_scratchdir)
+  if parsed.path.startswith("/tmp"):
+    return
+  params.HdfsDirectory(params.hive_exec_scratchdir,
+                       action="create_delayed",
+                       owner=params.hive_user,
+                       group=params.hdfs_user,
+                       mode=0777) # Hive expects this dir to be writeable by everyone as it is used as a temp dir
+

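The restored setup_custom_scratchdir() only provisions hive.exec.scratchdir when it points somewhere other than the already world-writable /tmp tree. The guard reduces to an emptiness check plus a urlparse path test; a self-contained sketch of just the predicate (the real code uses Ambari's is_empty() where this uses plain truthiness):

    from urlparse import urlparse  # Python 2, as in the Ambari agent scripts

    def needs_custom_scratchdir(hive_exec_scratchdir):
        # Unset values and anything under /tmp are left alone; everything else
        # must be pre-created because 'hive' cannot write at the HDFS root.
        if not hive_exec_scratchdir:
            return False
        return not urlparse(hive_exec_scratchdir).path.startswith('/tmp')

    assert not needs_custom_scratchdir('/tmp/hive')
    assert needs_custom_scratchdir('hdfs://nn:8020/custom/scratch')
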
http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py
index a33857c..d86de3d 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py
@@ -23,6 +23,7 @@ from resource_management import *
 from hive import hive
 from hive_service import hive_service
 from resource_management.libraries.functions import conf_select
+from resource_management.libraries.functions.dynamic_variable_interpretation import copy_tarballs_to_hdfs
 from resource_management.libraries.functions.security_commons import build_expectations, \
   cached_kinit_executor, get_params_from_filesystem, validate_security_config_properties, \
   FILE_TYPE_XML
@@ -70,6 +71,8 @@ class HiveServerDefault(HiveServer):
     env.set_params(params)
     self.configure(env) # FOR SECURITY
 
+    # This function is needed in HDP 2.2, but it is safe to call in earlier versions.
+    copy_tarballs_to_hdfs('mapreduce', 'hive-server2', params.tez_user, params.hdfs_user, params.user_group)
     setup_ranger_hive()    
     hive_service( 'hiveserver2', action = 'start',
       rolling_restart=rolling_restart )
@@ -99,14 +102,8 @@ class HiveServerDefault(HiveServer):
     if params.version and compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0:
       conf_select.select(params.stack_name, "hive", params.version)
       Execute(format("hdp-select set hive-server2 {version}"))
-      params.HdfsResource(InlineTemplate(params.mapreduce_tar_destination).get_content(),
-                          type="file",
-                          action="create_on_execute",
-                          source=params.mapreduce_tar_source,
-                          group=params.user_group,
-                          mode=params.tarballs_mode
-      )
-      params.HdfsResource(None, action="execute")
+      copy_tarballs_to_hdfs('mapreduce', 'hive-server2', params.tez_user, params.hdfs_user, params.user_group)
+      copy_tarballs_to_hdfs('tez', 'hive-server2', params.tez_user, params.hdfs_user, params.user_group)
 
   def security_status(self, env):
     import status_params

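Both call sites above gate the HDP 2.2 behavior on compare_versions(...) >= 0 against '2.2.0.0'. A standalone stand-in for that comparison (an assumption: Ambari's real helper lives in resource_management, but its use here implies component-wise comparison of dotted numeric versions):

    def compare_versions(a, b):
        # Compare dotted numeric versions component-wise; returns <0, 0 or >0.
        pa = [int(x) for x in a.split('.')]
        pb = [int(x) for x in b.split('.')]
        # Pad the shorter list so '2.2' and '2.2.0.0' compare equal.
        n = max(len(pa), len(pb))
        pa += [0] * (n - len(pa))
        pb += [0] * (n - len(pb))
        return (pa > pb) - (pa < pb)

    assert compare_versions('2.2.0.0', '2.2.0.0') == 0
    assert compare_versions('2.0.6.0', '2.2.0.0') < 0
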
http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
index 89f0224..bd6f70e 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
@@ -25,14 +25,13 @@ import os
 from ambari_commons.constants import AMBARI_SUDO_BINARY
 from ambari_commons.os_check import OSCheck
 
-from resource_management import *
 from resource_management.libraries.functions.default import default
 from resource_management.libraries.functions.format import format
 from resource_management.libraries.functions.version import format_hdp_stack_version
 from resource_management.libraries.functions import get_kinit_path
 from resource_management.libraries.script.script import Script
 from resource_management.libraries.functions.get_port_from_url import get_port_from_url
-
+from resource_management.libraries.resources.hdfs_directory import HdfsDirectory
 
 # server configurations
 config = Script.get_config()
@@ -46,12 +45,9 @@ hostname = config["hostname"]
 
 # This is expected to be of the form #.#.#.#
 stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
-hdp_stack_version_major = format_hdp_stack_version(stack_version_unformatted)
+hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)
 stack_is_hdp21 = Script.is_hdp_stack_greater_or_equal("2.0") and Script.is_hdp_stack_less_than("2.2")
 
-# this is not avaliable on INSTALL action because hdp-select is not available
-hdp_stack_version = version.get_hdp_build_version(hdp_stack_version_major)
-
 # New Cluster Stack Version that is defined during the RESTART of a Rolling Upgrade
 version = default("/commandParams/version", None)
 
@@ -99,41 +95,6 @@ if Script.is_hdp_stack_greater_or_equal("2.2"):
   # there are no client versions of these, use server versions directly
   hcat_lib = '/usr/hdp/current/hive-webhcat/share/hcatalog'
   webhcat_bin_dir = '/usr/hdp/current/hive-webhcat/sbin'
-  
-  # --- Tarballs ---
-
-  hive_tar_source = config['configurations']['cluster-env']['hive_tar_source']
-  pig_tar_source = config['configurations']['cluster-env']['pig_tar_source']
-  hadoop_streaming_tar_source = config['configurations']['cluster-env']['hadoop-streaming_tar_source']
-  sqoop_tar_source = config['configurations']['cluster-env']['sqoop_tar_source']
-  mapreduce_tar_source = config['configurations']['cluster-env']['mapreduce_tar_source']
-  tez_tar_source = config['configurations']['cluster-env']['tez_tar_source']
-  
-  hive_tar_destination = config['configurations']['cluster-env']['hive_tar_destination_folder']  + "/" + os.path.basename(hive_tar_source)
-  pig_tar_destination = config['configurations']['cluster-env']['pig_tar_destination_folder'] + "/" + os.path.basename(pig_tar_source)
-  hadoop_streaming_tar_destination_dir = config['configurations']['cluster-env']['hadoop-streaming_tar_destination_folder']
-  sqoop_tar_destination = config['configurations']['cluster-env']['sqoop_tar_destination_folder'] + "/" + os.path.basename(sqoop_tar_source)
-  mapreduce_tar_destination = config['configurations']['cluster-env']['mapreduce_tar_destination_folder'] + "/" + os.path.basename(mapreduce_tar_source)
-  tez_tar_destination = config['configurations']['cluster-env']['tez_tar_destination_folder'] + "/" + os.path.basename(tez_tar_source)
-
-  tarballs_mode = 0444
-else:
-  # --- Tarballs ---
-  hive_tar_source = hive_tar_file
-  pig_tar_source = pig_tar_file
-  hadoop_streaming_tar_source = hadoop_streeming_jars
-  sqoop_tar_source = sqoop_tar_file
-
-  webhcat_apps_dir = "/apps/webhcat"
-  
-  hive_tar_destination = webhcat_apps_dir + "/" + os.path.basename(hive_tar_source)
-  pig_tar_destination = webhcat_apps_dir + "/" + os.path.basename(pig_tar_source)
-  hadoop_streaming_tar_destination_dir = webhcat_apps_dir
-  sqoop_tar_destination_dir = webhcat_apps_dir
-
-  tarballs_mode = 0755
-
-
 
 
 execute_path = os.environ['PATH'] + os.pathsep + hive_bin + os.pathsep + hadoop_bin_dir
@@ -315,6 +276,7 @@ tez_user = config['configurations']['tez-env']['tez_user']
 # Tez jars
 tez_local_api_jars = '/usr/lib/tez/tez*.jar'
 tez_local_lib_jars = '/usr/lib/tez/lib/*.jar'
+app_dir_files = {tez_local_api_jars:None}
 
 # Tez libraries
 tez_lib_uris = default("/configurations/tez-site/tez.lib.uris", None)
@@ -358,6 +320,8 @@ templeton_jar = config['configurations']['webhcat-site']['templeton.jar']
 
 webhcat_server_host = config['clusterHostInfo']['webhcat_server_host']
 
+webhcat_apps_dir = "/apps/webhcat"
+
 hcat_hdfs_user_dir = format("/user/{hcat_user}")
 hcat_hdfs_user_mode = 0755
 webhcat_hdfs_user_dir = format("/user/{webhcat_user}")
@@ -366,18 +330,17 @@ webhcat_hdfs_user_mode = 0755
 security_param = "true" if security_enabled else "false"
 
 import functools
-#create partial functions with common arguments for every HdfsResource call
-#to create hdfs directory we need to call params.HdfsResource in code
-HdfsResource = functools.partial(
- HdfsResource,
-  user = hdfs_principal_name if security_enabled else hdfs_user,
+#create partial functions with common arguments for every HdfsDirectory call
+#to create hdfs directory we need to call params.HdfsDirectory in code
+HdfsDirectory = functools.partial(
+  HdfsDirectory,
+  conf_dir = hadoop_conf_dir,
+  hdfs_user = hdfs_user,
   security_enabled = security_enabled,
   keytab = hdfs_user_keytab,
   kinit_path_local = kinit_path_local,
-  hadoop_bin_dir = hadoop_bin_dir,
-  hadoop_conf_dir = hadoop_conf_dir
- )
-
+  bin_dir = hadoop_bin_dir
+)
 
 # ranger host
 ranger_admin_hosts = default("/clusterHostInfo/ranger_admin_hosts", [])

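The functools.partial block above is how these scripts avoid repeating cluster-wide arguments: params.py binds conf_dir, hdfs_user, the security settings and bin_dir once, and every later HdfsDirectory call supplies only the path-specific pieces. The pattern in isolation (generic sketch; hdfs_directory here is a hypothetical callable, not the real resource class):

    import functools

    def hdfs_directory(path, action=None, owner=None, mode=None,
                       conf_dir=None, hdfs_user=None, security_enabled=False,
                       keytab=None, kinit_path_local=None, bin_dir=None):
        print('would %s %s as %s via %s' % (action, path, owner, bin_dir))

    HdfsDirectory = functools.partial(
        hdfs_directory,
        conf_dir='/etc/hadoop/conf',                     # bound once, from params
        hdfs_user='hdfs',
        security_enabled=False,
        bin_dir='/usr/hdp/current/hadoop-client/bin',
    )

    # Call sites then look like the webhcat.py hunk that follows
    # (0755 is a Python 2 octal literal, matching the scripts):
    HdfsDirectory('/apps/webhcat', action='create_delayed', owner='hcat', mode=0755)
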
http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat.py
index c33bc90..733ab0a 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat.py
@@ -20,9 +20,11 @@ Ambari Agent
 """
 import sys
 import os.path
+import glob
 from resource_management import *
 from resource_management.core.resources.system import Execute
 from resource_management.libraries.functions.version import compare_versions
+from resource_management.libraries.functions.dynamic_variable_interpretation import copy_tarballs_to_hdfs
 from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl
 from ambari_commons import OSConst
 
@@ -44,6 +46,26 @@ def webhcat():
 def webhcat():
   import params
 
+  if params.hdp_stack_version != "" and compare_versions(params.hdp_stack_version, "2.2.0.0") < 0:
+    params.HdfsDirectory(params.webhcat_apps_dir,
+                         action="create_delayed",
+                         owner=params.webhcat_user,
+                         mode=0755
+    )
+  
+  if params.hcat_hdfs_user_dir != params.webhcat_hdfs_user_dir:
+    params.HdfsDirectory(params.hcat_hdfs_user_dir,
+                         action="create_delayed",
+                         owner=params.hcat_user,
+                         mode=params.hcat_hdfs_user_mode
+    )
+  params.HdfsDirectory(params.webhcat_hdfs_user_dir,
+                       action="create_delayed",
+                       owner=params.webhcat_user,
+                       mode=params.webhcat_hdfs_user_mode
+  )
+  params.HdfsDirectory(None, action="create")
+
   Directory(params.templeton_pid_dir,
             owner=params.webhcat_user,
             mode=0755,
@@ -72,6 +94,55 @@ def webhcat():
             path='/bin'
     )
 
+  # TODO, these checks that are specific to HDP 2.2 and greater should really be in a script specific to that stack.
+  if params.hdp_stack_version != "" and compare_versions(params.hdp_stack_version, "2.2.0.0") >= 0:
+    copy_tarballs_to_hdfs('hive', 'hive-webhcat', params.webhcat_user, params.hdfs_user, params.user_group)
+    copy_tarballs_to_hdfs('pig', 'hive-webhcat', params.webhcat_user, params.hdfs_user, params.user_group)
+    copy_tarballs_to_hdfs('hadoop-streaming', 'hive-webhcat', params.webhcat_user, params.hdfs_user, params.user_group)
+    copy_tarballs_to_hdfs('sqoop', 'hive-webhcat', params.webhcat_user, params.hdfs_user, params.user_group)
+  else:
+    CopyFromLocal(params.hadoop_streeming_jars,
+                  owner=params.webhcat_user,
+                  mode=0755,
+                  dest_dir=params.webhcat_apps_dir,
+                  kinnit_if_needed=kinit_if_needed,
+                  hdfs_user=params.hdfs_user,
+                  hadoop_bin_dir=params.hadoop_bin_dir,
+                  hadoop_conf_dir=params.hadoop_conf_dir
+    )
+
+    if (os.path.isfile(params.pig_tar_file)):
+      CopyFromLocal(params.pig_tar_file,
+                    owner=params.webhcat_user,
+                    mode=0755,
+                    dest_dir=params.webhcat_apps_dir,
+                    kinnit_if_needed=kinit_if_needed,
+                    hdfs_user=params.hdfs_user,
+                    hadoop_bin_dir=params.hadoop_bin_dir,
+                    hadoop_conf_dir=params.hadoop_conf_dir
+      )
+
+    CopyFromLocal(params.hive_tar_file,
+                  owner=params.webhcat_user,
+                  mode=0755,
+                  dest_dir=params.webhcat_apps_dir,
+                  kinnit_if_needed=kinit_if_needed,
+                  hdfs_user=params.hdfs_user,
+                  hadoop_bin_dir=params.hadoop_bin_dir,
+                  hadoop_conf_dir=params.hadoop_conf_dir
+    )
+
+    if (len(glob.glob(params.sqoop_tar_file)) > 0):
+      CopyFromLocal(params.sqoop_tar_file,
+                    owner=params.webhcat_user,
+                    mode=0755,
+                    dest_dir=params.webhcat_apps_dir,
+                    kinnit_if_needed=kinit_if_needed,
+                    hdfs_user=params.hdfs_user,
+                    hadoop_bin_dir=params.hadoop_bin_dir,
+                    hadoop_conf_dir=params.hadoop_conf_dir
+      )
+
   # Replace _HOST with hostname in relevant principal-related properties
   webhcat_site = params.config['configurations']['webhcat-site'].copy()
   for prop_name in ['templeton.hive.properties', 'templeton.kerberos.principal']:

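For context: this hunk restores the pre-AMBARI-11034 batching API, where
"create_delayed" only queues a directory and a final HdfsDirectory(None,
action="create") flushes the whole queue in one pass. A minimal sketch of
the pattern (Python 2, as in these scripts; the cluster-wide arguments are
pre-bound on params.HdfsDirectory via functools.partial, as the params.py
hunks further down show):

  import params  # params.HdfsDirectory has conf_dir, hdfs_user, etc. pre-bound

  # Queue directories; nothing touches HDFS yet.
  params.HdfsDirectory("/apps/webhcat", action="create_delayed", owner="hcat", mode=0755)
  params.HdfsDirectory("/user/hcat", action="create_delayed", owner="hcat", mode=0755)
  # A single flush then creates everything queued above.
  params.HdfsDirectory(None, action="create")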
http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_service_check.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_service_check.py
index 09fb690..9b59f30 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_service_check.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_service_check.py
@@ -21,7 +21,6 @@ limitations under the License.
 from resource_management import *
 from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl
 from ambari_commons import OSConst
-import time
 
 @OsFamilyFuncImpl(os_family=OSConst.WINSRV_FAMILY)
 def webhcat_service_check():
@@ -45,33 +44,8 @@ def webhcat_service_check():
   else:
     smokeuser_keytab= "no_keytab"
     smoke_user_principal="no_principal"
-    
-  unique_name = format("{smokeuser}.{timestamp}", timestamp = time.time())
-  templeton_test_script = format("idtest.{unique_name}.pig")
-  templeton_test_input = format("/tmp/idtest.{unique_name}.in")
-  templeton_test_output = format("/tmp/idtest.{unique_name}.out")
 
-  File(format("{tmp_dir}/{templeton_test_script}"),
-       content = Template("templeton_smoke.pig.j2", templeton_test_input=templeton_test_input, templeton_test_output=templeton_test_output),
-  )
-  
-  params.HdfsResource(format("/tmp/{templeton_test_script}"),
-                      action = "create_on_execute",
-                      type = "file",
-                      source = format("{tmp_dir}/{templeton_test_script}"),
-                      owner = params.smokeuser
-  )
-  
-  params.HdfsResource(templeton_test_input,
-                      action = "create_on_execute",
-                      type = "file",
-                      source = "/etc/passwd",
-                      owner = params.smokeuser
-  )
-  
-  params.HdfsResource(None, action = "execute")
-
-  cmd = format("{tmp_dir}/templetonSmoke.sh {webhcat_server_host[0]} {smokeuser} {templeton_port} {templeton_test_script} {smokeuser_keytab}"
+  cmd = format("{tmp_dir}/templetonSmoke.sh {webhcat_server_host[0]} {smokeuser} {templeton_port} {smokeuser_keytab}"
                " {security_param} {kinit_path_local} {smoke_user_principal}")
 
   Execute(cmd,

http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/templates/templeton_smoke.pig.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/templates/templeton_smoke.pig.j2 b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/templates/templeton_smoke.pig.j2
deleted file mode 100644
index 3153e81..0000000
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/templates/templeton_smoke.pig.j2
+++ /dev/null
@@ -1,24 +0,0 @@
-#
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-#
-
-A = load '{{templeton_test_input}}' using PigStorage(':');
-B = foreach A generate \$0 as id; 
-store B into '{{templeton_test_output}}';
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/params.py b/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/params.py
index c7d13eb..47ce98a 100644
--- a/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/params.py
@@ -18,14 +18,13 @@ limitations under the License.
 Ambari Agent
 
 """
-from resource_management import *
 from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import format
 from resource_management.libraries.functions.version import format_hdp_stack_version
 from resource_management.libraries.functions.default import default
 from resource_management.libraries.functions import get_kinit_path
 from resource_management.libraries.script.script import Script
-
+from resource_management.libraries.resources.hdfs_directory import HdfsDirectory
 
 # server configurations
 config = Script.get_config()
@@ -69,14 +68,14 @@ java64_home = config['hostLevelParams']['java_home']
 log4j_props = config['configurations']['mahout-log4j']['content']
 
 import functools
-#create partial functions with common arguments for every HdfsResource call
-#to create/delete hdfs directory/file/copyfromlocal we need to call params.HdfsResource in code
-HdfsResource = functools.partial(
-  HdfsResource,
-  user=hdfs_user,
+# create partial functions with common arguments for every HdfsDirectory call
+# to create an hdfs directory we need to call params.HdfsDirectory in code
+HdfsDirectory = functools.partial(
+  HdfsDirectory,
+  conf_dir=hadoop_conf_dir,
+  hdfs_user=hdfs_user,
   security_enabled = security_enabled,
   keytab = hdfs_user_keytab,
   kinit_path_local = kinit_path_local,
-  hadoop_bin_dir = hadoop_bin_dir,
-  hadoop_conf_dir = hadoop_conf_dir
+  bin_dir = hadoop_bin_dir
 )

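Every params.py touched by this revert repeats the same pattern:
functools.partial pre-binds the cluster-wide arguments (conf_dir,
hdfs_user, keytab, and so on) so call sites pass only per-resource
values. A stdlib-only sketch of the idea; the function body below is an
illustrative stand-in, not the Ambari resource:

  import functools

  def hdfs_directory(path, action, conf_dir, hdfs_user, bin_dir, mode=0755):
      # Stand-in for Ambari's HdfsDirectory; prints instead of acting.
      print "%s %s as %s (conf=%s, bin=%s, mode=%o)" % (
          action, path, hdfs_user, conf_dir, bin_dir, mode)

  # Bind the cluster-wide arguments once, exactly as params.py does above.
  HdfsDirectory = functools.partial(
      hdfs_directory,
      conf_dir="/etc/hadoop/conf",
      hdfs_user="hdfs",
      bin_dir="/usr/bin")

  # Call sites now supply only the per-directory values.
  HdfsDirectory("/user/mahout", action="create", mode=0775)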
http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/service_check.py b/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/service_check.py
index da93c35..f46c41f 100644
--- a/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/service_check.py
+++ b/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/service_check.py
@@ -20,34 +20,55 @@ Ambari Agent
 """
 
 from resource_management import *
+from resource_management.libraries.functions.dynamic_variable_interpretation import copy_tarballs_to_hdfs
 
 class MahoutServiceCheck(Script):
   def service_check(self, env):
     import params
     env.set_params(params)
 
+    create_input_dir_cmd = format("fs -mkdir /user/{smokeuser}/mahoutsmokeinput")
+    copy_test_file_to_hdfs_cmd = format("fs -put {tmp_dir}/sample-mahout-test.txt /user/{smokeuser}/mahoutsmokeinput/")
     mahout_command = format("mahout seqdirectory --input /user/{smokeuser}/mahoutsmokeinput/sample-mahout-test.txt "
                             "--output /user/{smokeuser}/mahoutsmokeoutput/ --charset utf-8")
     test_command = format("fs -test -e /user/{smokeuser}/mahoutsmokeoutput/_SUCCESS")
-    
+    remove_output_input_dirs_cmd = format("fs -rm -r -f /user/{smokeuser}/mahoutsmokeoutput "
+                                          "/user/{smokeuser}/mahoutsmokeinput")
+
+    ExecuteHadoop( remove_output_input_dirs_cmd,
+                   tries = 3,
+                   try_sleep = 5,
+                   user = params.smokeuser,
+                   conf_dir = params.hadoop_conf_dir,
+                   # for kinit run
+                   keytab = params.smoke_user_keytab,
+                   principal = params.smokeuser_principal,
+                   security_enabled = params.security_enabled,
+                   kinit_path_local = params.kinit_path_local,
+                   bin_dir = params.hadoop_bin_dir
+                   )
+
+    ExecuteHadoop( create_input_dir_cmd,
+                 tries = 3,
+                 try_sleep = 5,
+                 user = params.smokeuser,
+                 conf_dir = params.hadoop_conf_dir,
+                 bin_dir = params.hadoop_bin_dir
+    )
+
     File( format("{tmp_dir}/sample-mahout-test.txt"),
         content = "Test text which will be converted to sequence file.",
         mode = 0755
     )
-    
-    params.HdfsResource(format("/user/{smokeuser}/mahoutsmokeinput"),
-                        action="create_on_execute",
-                        type="directory",
-                        owner=params.smokeuser,
-    )
-    params.HdfsResource(format("/user/{smokeuser}/mahoutsmokeinput/sample-mahout-test.txt"),
-                        action="create_on_execute",
-                        type="file",
-                        owner=params.smokeuser,
-                        source=format("{tmp_dir}/sample-mahout-test.txt")
+
+    ExecuteHadoop( copy_test_file_to_hdfs_cmd,
+                   tries = 3,
+                   try_sleep = 5,
+                   user = params.smokeuser,
+                   conf_dir = params.hadoop_conf_dir,
+                   bin_dir = params.hadoop_bin_dir
     )
-    params.HdfsResource(None, action="execute")
-    
+
     Execute( mahout_command,
              tries = 3,
              try_sleep = 5,

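Each ExecuteHadoop call above amounts to running "hadoop --config
<conf_dir> <cmd>" as the given user, kinit-ing first when a keytab and
principal are supplied, and retrying per tries/try_sleep. A rough
subprocess-level equivalent of the three calls (the user name, paths, and
the missing retry/kinit handling are simplifications, not the library's
actual code):

  import subprocess

  def hadoop(cmd, conf_dir="/etc/hadoop/conf", user="ambari-qa"):
      # Approximates ExecuteHadoop: run the fs command as the smoke user.
      subprocess.check_call(
          ["su", "-s", "/bin/bash", user, "-c",
           "hadoop --config %s %s" % (conf_dir, cmd)])

  hadoop("fs -rm -r -f /user/ambari-qa/mahoutsmokeoutput /user/ambari-qa/mahoutsmokeinput")
  hadoop("fs -mkdir /user/ambari-qa/mahoutsmokeinput")
  hadoop("fs -put /tmp/sample-mahout-test.txt /user/ambari-qa/mahoutsmokeinput/")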
http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/files/oozieSmoke2.sh
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/files/oozieSmoke2.sh b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/files/oozieSmoke2.sh
index fd76321..32211c4 100644
--- a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/files/oozieSmoke2.sh
+++ b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/files/oozieSmoke2.sh
@@ -24,14 +24,13 @@ export os_family=$1
 export oozie_lib_dir=$2
 export oozie_conf_dir=$3
 export oozie_bin_dir=$4
-export oozie_examples_dir=$5
-export hadoop_conf_dir=$6
-export hadoop_bin_dir=$7
-export smoke_test_user=$8
-export security_enabled=$9
-export smoke_user_keytab=$10
-export kinit_path_local=${11}
-export smokeuser_principal=${12}
+export hadoop_conf_dir=$5
+export hadoop_bin_dir=$6
+export smoke_test_user=$7
+export security_enabled=$8
+export smoke_user_keytab=$9
+export kinit_path_local=${10}
+export smokeuser_principal=${11}
 
 function getValueFromField {
   xmllint $1 | grep "<name>$2</name>" -C 2 | grep '<value>' | cut -d ">" -f2 | cut -d "<" -f1
@@ -68,9 +67,37 @@ function checkOozieJobStatus {
 }
 
 export OOZIE_EXIT_CODE=0
+export JOBTRACKER=`getValueFromField ${hadoop_conf_dir}/yarn-site.xml yarn.resourcemanager.address`
+export NAMENODE=`getValueFromField ${hadoop_conf_dir}/core-site.xml fs.defaultFS`
 export OOZIE_SERVER=`getValueFromField ${oozie_conf_dir}/oozie-site.xml oozie.base.url | tr '[:upper:]' '[:lower:]'`
 
-cd $oozie_examples_dir
+# search for the oozie examples tarball (oozie-examples.tar.gz) and, if found, store its directory name
+export OOZIE_EXAMPLES_DIR=`find "${oozie_lib_dir}/" -name "oozie-examples.tar.gz" | xargs dirname`
+if [[ -z "$OOZIE_EXAMPLES_DIR" ]] ; then
+  if [ "$os_family" == "ubuntu" ] ; then
+    LIST_PACKAGE_FILES_CMD='dpkg-query -L'
+  else
+    LIST_PACKAGE_FILES_CMD='rpm -ql'
+  fi
+  export OOZIE_EXAMPLES_DIR=`$LIST_PACKAGE_FILES_CMD oozie-client | grep 'oozie-examples.tar.gz$' | xargs dirname`
+fi
+if [[ -z "$OOZIE_EXAMPLES_DIR" ]] ; then
+  export OOZIE_EXAMPLES_DIR='/usr/hdp/current/oozie-client/doc/'
+else
+  echo "Located Oozie examples JAR at $OOZIE_EXAMPLES_DIR"
+fi
+
+cd $OOZIE_EXAMPLES_DIR
+
+/var/lib/ambari-agent/ambari-sudo.sh tar -zxf oozie-examples.tar.gz
+/var/lib/ambari-agent/ambari-sudo.sh chmod -R o+rx examples
+
+/var/lib/ambari-agent/ambari-sudo.sh sed -i "s|nameNode=hdfs://localhost:8020|nameNode=$NAMENODE|g"  examples/apps/map-reduce/job.properties
+/var/lib/ambari-agent/ambari-sudo.sh sed -i "s|nameNode=hdfs://localhost:9000|nameNode=$NAMENODE|g"  examples/apps/map-reduce/job.properties
+/var/lib/ambari-agent/ambari-sudo.sh sed -i "s|jobTracker=localhost:8021|jobTracker=$JOBTRACKER|g" examples/apps/map-reduce/job.properties
+/var/lib/ambari-agent/ambari-sudo.sh sed -i "s|jobTracker=localhost:9001|jobTracker=$JOBTRACKER|g" examples/apps/map-reduce/job.properties
+/var/lib/ambari-agent/ambari-sudo.sh sed -i "s|jobTracker=localhost:8032|jobTracker=$JOBTRACKER|g" examples/apps/map-reduce/job.properties
+/var/lib/ambari-agent/ambari-sudo.sh sed -i "s|oozie.wf.application.path=hdfs://localhost:9000|oozie.wf.application.path=$NAMENODE|g" examples/apps/map-reduce/job.properties
 
 if [[ $security_enabled == "True" ]]; then
   kinitcmd="${kinit_path_local} -kt ${smoke_user_keytab} ${smokeuser_principal}; "
@@ -78,7 +105,12 @@ else
   kinitcmd=""
 fi
 
-cmd="${kinitcmd}source ${oozie_conf_dir}/oozie-env.sh ; ${oozie_bin_dir}/oozie -Doozie.auth.token.cache=false job -oozie $OOZIE_SERVER -config $oozie_examples_dir/examples/apps/map-reduce/job.properties  -run"
+/var/lib/ambari-agent/ambari-sudo.sh su ${smoke_test_user} -s /bin/bash - -c "${hadoop_bin_dir}/hdfs --config ${hadoop_conf_dir} dfs -rm -r examples"
+/var/lib/ambari-agent/ambari-sudo.sh su ${smoke_test_user} -s /bin/bash - -c "${hadoop_bin_dir}/hdfs --config ${hadoop_conf_dir} dfs -rm -r input-data"
+/var/lib/ambari-agent/ambari-sudo.sh su ${smoke_test_user} -s /bin/bash - -c "${hadoop_bin_dir}/hdfs --config ${hadoop_conf_dir} dfs -copyFromLocal $OOZIE_EXAMPLES_DIR/examples examples"
+/var/lib/ambari-agent/ambari-sudo.sh su ${smoke_test_user} -s /bin/bash - -c "${hadoop_bin_dir}/hdfs --config ${hadoop_conf_dir} dfs -copyFromLocal $OOZIE_EXAMPLES_DIR/examples/input-data input-data"
+
+cmd="${kinitcmd}source ${oozie_conf_dir}/oozie-env.sh ; ${oozie_bin_dir}/oozie -Doozie.auth.token.cache=false job -oozie $OOZIE_SERVER -config $OOZIE_EXAMPLES_DIR/examples/apps/map-reduce/job.properties  -run"
 echo $cmd
 job_info=`/var/lib/ambari-agent/ambari-sudo.sh su ${smoke_test_user} -s /bin/bash - -c "$cmd" | grep "job:"`
 job_id="`echo $job_info | cut -d':' -f2`"

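With oozie_examples_dir ($5) dropped from the restored script, every
positional argument from hadoop_conf_dir onward shifts down by one, so
callers must change in lockstep (the service_check.py hunk below does
exactly that). An illustrative map of the secured invocation, with
placeholder values rather than real cluster settings:

  # Argument order expected by the restored oozieSmoke2.sh; the examples
  # directory, formerly $5, is now discovered inside the script itself.
  cmd = " ".join([
      "/var/lib/ambari-agent/tmp/oozieSmoke2.sh",
      "redhat",                 # $1  os_family
      "/usr/lib/oozie",         # $2  oozie_lib_dir
      "/etc/oozie/conf",        # $3  oozie_conf_dir
      "/usr/lib/oozie/bin",     # $4  oozie_bin_dir
      "/etc/hadoop/conf",       # $5  hadoop_conf_dir (was $6)
      "/usr/bin",               # $6  hadoop_bin_dir  (was $7)
      "ambari-qa",              # $7  smoke_test_user (was $8)
      "True",                   # $8  security_enabled
      "/etc/security/keytabs/smokeuser.headless.keytab",  # $9  keytab
      "/usr/bin/kinit",         # $10 kinit_path_local
      "ambari-qa@EXAMPLE.COM",  # $11 smokeuser_principal
  ])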
http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/files/prepareOozieHdfsDirectories.sh
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/files/prepareOozieHdfsDirectories.sh b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/files/prepareOozieHdfsDirectories.sh
deleted file mode 100644
index 79a1bfc..0000000
--- a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/files/prepareOozieHdfsDirectories.sh
+++ /dev/null
@@ -1,45 +0,0 @@
-#!/usr/bin/env bash
-#
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-#
-
-export oozie_conf_dir=$1
-export oozie_examples_dir=$2
-export hadoop_conf_dir=$3
-
-function getValueFromField {
-  xmllint $1 | grep "<name>$2</name>" -C 2 | grep '<value>' | cut -d ">" -f2 | cut -d "<" -f1
-  return $?
-}
-
-export JOBTRACKER=`getValueFromField ${hadoop_conf_dir}/yarn-site.xml yarn.resourcemanager.address`
-export NAMENODE=`getValueFromField ${hadoop_conf_dir}/core-site.xml fs.defaultFS`
-
-cd $oozie_examples_dir
-
-/var/lib/ambari-agent/ambari-sudo.sh tar -zxf oozie-examples.tar.gz
-/var/lib/ambari-agent/ambari-sudo.sh chmod -R o+rx examples
-
-/var/lib/ambari-agent/ambari-sudo.sh sed -i "s|nameNode=hdfs://localhost:8020|nameNode=$NAMENODE|g"  examples/apps/map-reduce/job.properties
-/var/lib/ambari-agent/ambari-sudo.sh sed -i "s|nameNode=hdfs://localhost:9000|nameNode=$NAMENODE|g"  examples/apps/map-reduce/job.properties
-/var/lib/ambari-agent/ambari-sudo.sh sed -i "s|jobTracker=localhost:8021|jobTracker=$JOBTRACKER|g" examples/apps/map-reduce/job.properties
-/var/lib/ambari-agent/ambari-sudo.sh sed -i "s|jobTracker=localhost:9001|jobTracker=$JOBTRACKER|g" examples/apps/map-reduce/job.properties
-/var/lib/ambari-agent/ambari-sudo.sh sed -i "s|jobTracker=localhost:8032|jobTracker=$JOBTRACKER|g" examples/apps/map-reduce/job.properties
-/var/lib/ambari-agent/ambari-sudo.sh sed -i "s|oozie.wf.application.path=hdfs://localhost:9000|oozie.wf.application.path=$NAMENODE|g" examples/apps/map-reduce/job.properties

http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie.py b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie.py
index ae66eb0..6adf2ac 100644
--- a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie.py
+++ b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie.py
@@ -85,13 +85,11 @@ def oozie(is_server=False):
   import params
 
   if is_server:
-    params.HdfsResource(params.oozie_hdfs_user_dir,
-                         type="directory",
-                         action="create_on_execute",
+    params.HdfsDirectory(params.oozie_hdfs_user_dir,
+                         action="create",
                          owner=params.oozie_user,
                          mode=params.oozie_hdfs_user_mode
     )
-    params.HdfsResource(None, action="execute")
   Directory(params.conf_dir,
              recursive = True,
              owner = params.oozie_user,

http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_server_upgrade.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_server_upgrade.py b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_server_upgrade.py
index 47361c4..c3d9c78 100644
--- a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_server_upgrade.py
+++ b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_server_upgrade.py
@@ -185,16 +185,12 @@ def upgrade_oozie():
     command = format("{kinit_path_local} -kt {oozie_keytab} {oozie_principal_with_host}")
     Execute(command, user=params.oozie_user)
 
-  
-  params.HdfsResource(format("{oozie_hdfs_user_dir}/share"),
-                      action = "create_on_execute",
-                      type = "directory",
-                      owner = "oozie",
-                      group = "hadoop",
-                      mode = 0755,
-                      recursive_chmod = True
-  )
-  params.HdfsResource(None, action = "execute")
+  # ensure that HDFS is prepared to receive the new sharelib
+  command = format("hdfs dfs -chown oozie:hadoop {oozie_hdfs_user_dir}/share")
+  Execute(command, user=params.oozie_user)
+
+  command = format("hdfs dfs -chmod -R 755 {oozie_hdfs_user_dir}/share")
+  Execute(command, user=params.oozie_user)
 
   # upgrade oozie DB
   command = format("{oozie_home}/bin/ooziedb.sh upgrade -run")

http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params_linux.py
index e8116d2..6909cd7 100644
--- a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params_linux.py
@@ -17,7 +17,6 @@ See the License for the specific language governing permissions and
 limitations under the License.
 
 """
-from resource_management import *
 from ambari_commons.constants import AMBARI_SUDO_BINARY
 from resource_management.libraries.functions import format
 from resource_management.libraries.functions import conf_select
@@ -26,7 +25,7 @@ from resource_management.libraries.functions.default import default
 from resource_management.libraries.functions import get_kinit_path
 from resource_management.libraries.functions import get_port_from_url
 from resource_management.libraries.script.script import Script
-
+from resource_management.libraries.resources.hdfs_directory import HdfsDirectory
 from resource_management.libraries.functions.get_lzo_packages import get_lzo_packages
 
 import status_params
@@ -69,7 +68,6 @@ if Script.is_hdp_stack_greater_or_equal("2.2"):
   oozie_shared_lib = format("/usr/hdp/current/{oozie_root}/share")
   oozie_home = format("/usr/hdp/current/{oozie_root}")
   oozie_bin_dir = format("/usr/hdp/current/{oozie_root}/bin")
-  oozie_examples_regex = format("/usr/hdp/current/{oozie_root}/doc")
   falcon_home = '/usr/hdp/current/falcon-client'
 
   conf_dir = format("/usr/hdp/current/{oozie_root}/conf")
@@ -90,7 +88,6 @@ else:
   falcon_home = '/usr/lib/falcon'
   conf_dir = "/etc/oozie/conf"
   hive_conf_dir = "/etc/oozie/conf/action-conf/hive"
-  oozie_examples_regex = "/usr/share/doc/oozie-*"
 
 execute_path = oozie_bin_dir + os.pathsep + hadoop_bin_dir
 
@@ -198,18 +195,17 @@ hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
 hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
 hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
 import functools
-#create partial functions with common arguments for every HdfsResource call
-#to create hdfs directory we need to call params.HdfsResource in code
-HdfsResource = functools.partial(
-  HdfsResource,
-  user=hdfs_user,
+# create partial functions with common arguments for every HdfsDirectory call
+# to create an hdfs directory we need to call params.HdfsDirectory in code
+HdfsDirectory = functools.partial(
+  HdfsDirectory,
+  conf_dir=hadoop_conf_dir,
+  hdfs_user=hdfs_user,
   security_enabled = security_enabled,
   keytab = hdfs_user_keytab,
   kinit_path_local = kinit_path_local,
-  hadoop_bin_dir = hadoop_bin_dir,
-  hadoop_conf_dir = hadoop_conf_dir
- )
-
+  bin_dir = hadoop_bin_dir
+)
 
 # The logic for LZO also exists in HDFS' params.py
 io_compression_codecs = default("/configurations/core-site/io.compression.codecs", None)

http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/service_check.py b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/service_check.py
index 6cdd362..d589452 100644
--- a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/service_check.py
+++ b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/service_check.py
@@ -26,7 +26,6 @@ from resource_management.libraries.script import Script
 from ambari_commons.os_family_impl import OsFamilyImpl
 from ambari_commons import OSConst
 import os
-import glob
 
 
 class OozieServiceCheck(Script):
@@ -40,53 +39,30 @@ class OozieServiceCheckDefault(OozieServiceCheck):
     env.set_params(params)
 
     # on HDP1 this file is different
-    prepare_hdfs_file_name = 'prepareOozieHdfsDirectories.sh'
     smoke_test_file_name = 'oozieSmoke2.sh'
 
-    OozieServiceCheckDefault.oozie_smoke_shell_file(smoke_test_file_name, prepare_hdfs_file_name)
+    OozieServiceCheckDefault.oozie_smoke_shell_file(smoke_test_file_name)
 
   @staticmethod
-  def oozie_smoke_shell_file(file_name, prepare_hdfs_file_name):
+  def oozie_smoke_shell_file(file_name):
     import params
 
     File(format("{tmp_dir}/{file_name}"),
          content=StaticFile(file_name),
          mode=0755
     )
-    File(format("{tmp_dir}/{prepare_hdfs_file_name}"),
-         content=StaticFile(prepare_hdfs_file_name),
-         mode=0755
-    )
 
     os_family = System.get_instance().os_family
-    oozie_examples_dir = glob.glob(params.oozie_examples_regex)[0]
-    
-    Execute(format("{tmp_dir}/{prepare_hdfs_file_name} {conf_dir} {oozie_examples_dir} {hadoop_conf_dir} "),
-            tries=3,
-            try_sleep=5,
-            logoutput=True
-    )
-    
-    params.HdfsResource(format('/user/{smokeuser}/examples'),
-      action = "create_on_execute",
-      type = "directory",
-      source = format("{oozie_examples_dir}/examples"),
-    )
-    params.HdfsResource(format('/user/{smokeuser}/input-data'),
-      action = "create_on_execute",
-      type = "directory",
-      source = format("{oozie_examples_dir}/examples/input-data"),
-    )
-    params.HdfsResource(None, action="execute")
 
     if params.security_enabled:
       sh_cmd = format(
-        "{tmp_dir}/{file_name} {os_family} {oozie_lib_dir} {conf_dir} {oozie_bin_dir} {oozie_examples_dir} {hadoop_conf_dir} {hadoop_bin_dir} {smokeuser} {security_enabled} {smokeuser_keytab} {kinit_path_local} {smokeuser_principal}")
+        "{tmp_dir}/{file_name} {os_family} {oozie_lib_dir} {conf_dir} {oozie_bin_dir} {hadoop_conf_dir} {hadoop_bin_dir} {smokeuser} {security_enabled} {smokeuser_keytab} {kinit_path_local} {smokeuser_principal}")
     else:
       sh_cmd = format(
-        "{tmp_dir}/{file_name} {os_family} {oozie_lib_dir} {conf_dir} {oozie_bin_dir} {oozie_examples_dir} {hadoop_conf_dir} {hadoop_bin_dir} {smokeuser} {security_enabled}")
-    
-    Execute(sh_cmd,
+        "{tmp_dir}/{file_name} {os_family} {oozie_lib_dir} {conf_dir} {oozie_bin_dir} {hadoop_conf_dir} {hadoop_bin_dir} {smokeuser} {security_enabled}")
+
+    Execute(format("{tmp_dir}/{file_name}"),
+            command=sh_cmd,
             path=params.execute_path,
             tries=3,
             try_sleep=5,

http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/params_linux.py
index ddb829e..93c8c71 100644
--- a/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/params_linux.py
@@ -18,15 +18,13 @@ limitations under the License.
 Ambari Agent
 
 """
-from resource_management import *
 from resource_management.libraries.functions import format
 from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions.version import format_hdp_stack_version
 from resource_management.libraries.functions.default import default
 from resource_management.libraries.functions import get_kinit_path
 from resource_management.libraries.script.script import Script
-
-import os
+from resource_management.libraries.resources.hdfs_directory import HdfsDirectory
 
 # server configurations
 config = Script.get_config()
@@ -53,10 +51,6 @@ if Script.is_hdp_stack_greater_or_equal("2.2"):
   hadoop_bin_dir = "/usr/hdp/current/hadoop-client/bin"
   hadoop_home = '/usr/hdp/current/hadoop-client'
   pig_bin_dir = '/usr/hdp/current/pig-client/bin'
-  
-  tez_tar_source = config['configurations']['cluster-env']['tez_tar_source']
-  tez_tar_destination = config['configurations']['cluster-env']['tez_tar_destination_folder'] + "/" + os.path.basename(tez_tar_source)
-
 
 hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
 hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
@@ -77,15 +71,14 @@ pig_properties = config['configurations']['pig-properties']['content']
 log4j_props = config['configurations']['pig-log4j']['content']
 
 import functools
-#create partial functions with common arguments for every HdfsResource call
-#to create hdfs directory we need to call params.HdfsResource in code
-HdfsResource = functools.partial(
-  HdfsResource,
-  user=hdfs_principal_name if security_enabled else hdfs_user,
+# create partial functions with common arguments for every HdfsDirectory call
+# to create an hdfs directory we need to call params.HdfsDirectory in code
+HdfsDirectory = functools.partial(
+  HdfsDirectory,
+  conf_dir=hadoop_conf_dir,
+  hdfs_user=hdfs_user,
   security_enabled = security_enabled,
   keytab = hdfs_user_keytab,
   kinit_path_local = kinit_path_local,
-  hadoop_bin_dir = hadoop_bin_dir,
-  hadoop_conf_dir = hadoop_conf_dir
- )
-
+  bin_dir = hadoop_bin_dir
+)

http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/service_check.py b/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/service_check.py
index 5f33b1d..9de30ed 100644
--- a/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/service_check.py
+++ b/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/service_check.py
@@ -20,6 +20,7 @@ Ambari Agent
 """
 
 from resource_management import *
+from resource_management.libraries.functions.dynamic_variable_interpretation import copy_tarballs_to_hdfs
 from resource_management.libraries import functions
 from ambari_commons import OSConst
 from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl
@@ -33,23 +34,26 @@ class PigServiceCheckLinux(PigServiceCheck):
     import params
     env.set_params(params)
 
-    input_file = format('/user/{smokeuser}/passwd')
-    output_dir = format('/user/{smokeuser}/pigsmoke.out')
-
-    params.HdfsResource(output_dir,
-                        type="directory",
-                        action="delete_on_execute",
-                        user=params.smokeuser,
-                        )
-    params.HdfsResource(input_file,
-                        type="file",
-                        source="/etc/passwd",
-                        action="create_on_execute",
-                        user=params.smokeuser,
-    )
-    params.HdfsResource(None, action="execute")
- 
+    input_file = 'passwd'
+    output_file = "pigsmoke.out"
+
+    cleanup_cmd = format("dfs -rmr {output_file} {input_file}")
+    # cleanup is chained into create_file_cmd to handle retries: a retry would otherwise hit a stale input file, and the exit code must come from the second (put) command
+    create_file_cmd = format("{cleanup_cmd}; hadoop --config {hadoop_conf_dir} dfs -put /etc/passwd {input_file} ") # TODO: inconsistent that only the second command needs the hadoop binary
+    test_cmd = format("fs -test -e {output_file}")
 
+    ExecuteHadoop( create_file_cmd,
+      tries     = 3,
+      try_sleep = 5,
+      user      = params.smokeuser,
+      conf_dir = params.hadoop_conf_dir,
+      # for kinit run
+      keytab = params.smoke_user_keytab,
+      principal = params.smokeuser_principal,
+      security_enabled = params.security_enabled,
+      kinit_path_local = params.kinit_path_local,
+      bin_dir = params.hadoop_bin_dir
+    )
 
     File( format("{tmp_dir}/pigSmoke.sh"),
       content = StaticFile("pigSmoke.sh"),
@@ -64,7 +68,6 @@ class PigServiceCheckLinux(PigServiceCheck):
       user      = params.smokeuser
     )
 
-    test_cmd = format("fs -test -e {output_dir}")
     ExecuteHadoop( test_cmd,
       user      = params.smokeuser,
       conf_dir = params.hadoop_conf_dir,
@@ -73,27 +76,21 @@ class PigServiceCheckLinux(PigServiceCheck):
 
     if params.hdp_stack_version != "" and compare_versions(params.hdp_stack_version, '2.2') >= 0:
       # cleanup results from previous test
-      params.HdfsResource(output_dir,
-                          type="directory",
-                          action="delete_on_execute",
-                          user=params.smokeuser,
-      )
-      params.HdfsResource(input_file,
-                          type="file",
-                          source="/etc/passwd",
-                          action="create_on_execute",
-                          user=params.smokeuser,
+      ExecuteHadoop( create_file_cmd,
+        tries     = 3,
+        try_sleep = 5,
+        user      = params.smokeuser,
+        conf_dir = params.hadoop_conf_dir,
+        # for kinit run
+        keytab = params.smoke_user_keytab,
+        principal = params.smokeuser_principal,
+        security_enabled = params.security_enabled,
+        kinit_path_local = params.kinit_path_local,
+        bin_dir = params.hadoop_bin_dir
       )
 
       # Check for Pig-on-Tez
-      params.HdfsResource(InlineTemplate(params.tez_tar_destination).get_content(),
-                          type="file",
-                          action="create_on_execute",
-                          source=params.tez_tar_source,
-                          group=params.user_group,
-                          owner=params.hdfs_user
-      )
-      params.HdfsResource(None, action="execute")
+      copy_tarballs_to_hdfs('tez', 'hadoop-client', params.smokeuser, params.hdfs_user, params.user_group)
 
       if params.security_enabled:
         kinit_cmd = format("{kinit_path_local} -kt {smoke_user_keytab} {smokeuser_principal};")

http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/job_history_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/job_history_server.py b/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/job_history_server.py
index 8c05a8e..0206415 100644
--- a/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/job_history_server.py
+++ b/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/job_history_server.py
@@ -22,6 +22,7 @@ import sys
 import os
 from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions.version import compare_versions, format_hdp_stack_version
+from resource_management.libraries.functions.dynamic_variable_interpretation import copy_tarballs_to_hdfs
 from resource_management.libraries.functions.format import format
 from resource_management.libraries.functions.check_process_status import check_process_status
 from resource_management.core.resources import Execute
@@ -76,14 +77,7 @@ class JobHistoryServer(Script):
     if params.version and compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0:
       conf_select.select(params.stack_name, "spark", params.version)
       Execute(format("hdp-select set spark-historyserver {version}"))
-      params.HdfsResource(InlineTemplate(params.tez_tar_destination).get_content(),
-                          type="file",
-                          action="create_on_execute",
-                          source=params.tez_tar_source,
-                          group=params.user_group,
-                          owner=params.hdfs_user
-      )
-      params.HdfsResource(None, action="execute")
+      copy_tarballs_to_hdfs('tez', 'spark-historyserver', params.spark_user, params.hdfs_user, params.user_group)
 
 if __name__ == "__main__":
   JobHistoryServer().execute()

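copy_tarballs_to_hdfs is the pre-AMBARI-11034 helper this revert brings
back across Spark, YARN, Pig, and Hive. Its positional arguments, as
inferred from the call sites in this patch rather than from library
documentation, are: tarball family, component whose hdp-select version
determines the destination path, user performing the copy, HDFS
superuser, and group. A hedged usage sketch:

  from resource_management.libraries.functions.dynamic_variable_interpretation \
      import copy_tarballs_to_hdfs

  # Argument roles inferred from the hunks in this patch; values are examples.
  copy_tarballs_to_hdfs(
      'tez',                  # which tarball family to copy
      'spark-historyserver',  # component that fixes the versioned HDFS path
      'spark',                # user that runs the copy
      'hdfs',                 # HDFS superuser owning the upload
      'hadoop')               # group for the uploaded file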
http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/params.py b/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/params.py
index 2600141..4130472 100644
--- a/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/params.py
@@ -23,7 +23,6 @@ import status_params
 
 from setup_spark import *
 
-from resource_management import *
 import resource_management.libraries.functions
 from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import format
@@ -31,7 +30,7 @@ from resource_management.libraries.functions.version import format_hdp_stack_ver
 from resource_management.libraries.functions.default import default
 from resource_management.libraries.functions import get_kinit_path
 from resource_management.libraries.script.script import Script
-
+from resource_management.libraries.resources.hdfs_directory import HdfsDirectory
 
 # a map of the Ambari role to the component name
 # for use with /usr/hdp/current/<component>
@@ -70,8 +69,6 @@ if Script.is_hdp_stack_greater_or_equal("2.2"):
   spark_log_dir = config['configurations']['spark-env']['spark_log_dir']
   spark_pid_dir = status_params.spark_pid_dir
   spark_home = format("/usr/hdp/current/{component_directory}")
-  tez_tar_source = config['configurations']['cluster-env']['tez_tar_source']
-  tez_tar_destination = config['configurations']['cluster-env']['tez_tar_destination_folder'] + "/" + os.path.basename(tez_tar_source)
 
 
 java_home = config['hostLevelParams']['java_home']
@@ -158,14 +155,14 @@ if security_enabled:
 
 
 import functools
-#create partial functions with common arguments for every HdfsResource call
-#to create/delete hdfs directory/file/copyfromlocal we need to call params.HdfsResource in code
-HdfsResource = functools.partial(
-  HdfsResource,
-  user=hdfs_user,
+# create partial functions with common arguments for every HdfsDirectory call
+# to create an hdfs directory we need to call params.HdfsDirectory in code
+HdfsDirectory = functools.partial(
+  HdfsDirectory,
+  conf_dir=hadoop_conf_dir,
+  hdfs_user=hdfs_user,
   security_enabled = security_enabled,
   keytab = hdfs_user_keytab,
   kinit_path_local = kinit_path_local,
-  hadoop_bin_dir = hadoop_bin_dir,
-  hadoop_conf_dir = hadoop_conf_dir
- )
+  bin_dir = hadoop_bin_dir
+)

http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/setup_spark.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/setup_spark.py b/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/setup_spark.py
index 5c01337..80d6bbc 100644
--- a/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/setup_spark.py
+++ b/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/setup_spark.py
@@ -37,13 +37,11 @@ def setup_spark(env, type, action = None):
             recursive=True
   )
   if type == 'server' and action == 'config':
-    params.HdfsResource(params.spark_hdfs_user_dir,
-                       type="directory",
-                       action="create_on_execute",
+    params.HdfsDirectory(params.spark_hdfs_user_dir,
+                       action="create",
                        owner=params.spark_user,
                        mode=0775
     )
-    params.HdfsResource(None, action="execute")
     
   PropertiesFile(format("{spark_conf}/spark-defaults.conf"),
     properties = params.config['configurations']['spark-defaults'],

http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/spark_service.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/spark_service.py b/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/spark_service.py
index 840b81d..26a403d 100644
--- a/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/spark_service.py
+++ b/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/spark_service.py
@@ -27,14 +27,7 @@ def spark_service(action):
       spark_kinit_cmd = format("{kinit_path_local} -kt {spark_kerberos_keytab} {spark_principal}; ")
       Execute(spark_kinit_cmd, user=params.spark_user)
 
-      params.HdfsResource(InlineTemplate(params.tez_tar_destination).get_content(),
-                          type="file",
-                          action="create_on_execute",
-                          source=params.tez_tar_source,
-                          group=params.user_group,
-                          owner=params.hdfs_user
-      )
-      params.HdfsResource(None, action="execute")
+    copy_tarballs_to_hdfs('tez', 'spark-historyserver', params.spark_user, params.hdfs_user, params.user_group)
 
     no_op_test = format(
       'ls {spark_history_server_pid_file} >/dev/null 2>&1 && ps -p `cat {spark_history_server_pid_file}` >/dev/null 2>&1')

http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/configuration/storm-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/configuration/storm-env.xml b/ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/configuration/storm-env.xml
index 2c7bbc4..5aadd99 100644
--- a/ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/configuration/storm-env.xml
+++ b/ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/configuration/storm-env.xml
@@ -62,10 +62,12 @@ export STORM_LOG_DIR={{log_dir}}
     </value>
   </property>
 
-   <property>
-     <name>nimbus_seeds_supported</name>
-     <value>false</value>
-     <description></description>
-   </property>
+  <configuration>
+    <property>
+      <name>nimbus_seeds_supported</name>
+      <value>false</value>
+      <description></description>
+    </property>
+  </configuration>
 
 </configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params_linux.py
index 7e80f99..7f17a26 100644
--- a/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params_linux.py
@@ -19,14 +19,13 @@ limitations under the License.
 """
 import os
 
-from resource_management import *
 from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions.version import format_hdp_stack_version
 from resource_management.libraries.functions.default import default
 from resource_management.libraries.functions.format import format
 from resource_management.libraries.functions import get_kinit_path
 from resource_management.libraries.script.script import Script
-
+from resource_management.libraries.resources.hdfs_directory import HdfsDirectory
 
 # server configurations
 config = Script.get_config()
@@ -76,17 +75,15 @@ user_group = config['configurations']['cluster-env']['user_group']
 tez_env_sh_template = config['configurations']['tez-env']['content']
 
 import functools
-#create partial functions with common arguments for every HdfsResource call
-#to create/delete/copyfromlocal hdfs directories/files we need to call params.HdfsResource in code
-HdfsResource = functools.partial(
-  HdfsResource,
-  user=hdfs_user,
-  security_enabled = security_enabled,
-  keytab = hdfs_user_keytab,
-  kinit_path_local = kinit_path_local,
-  hadoop_bin_dir = hadoop_bin_dir,
-  hadoop_conf_dir = hadoop_conf_dir
+# Create partial functions with common arguments for every HdfsDirectory call
+# to create hdfs directory we need to call params.HdfsDirectory in code
+HdfsDirectory = functools.partial(
+  HdfsDirectory,
+  conf_dir=hadoop_conf_dir,
+  hdfs_user=hdfs_user,
+  security_enabled=security_enabled,
+  keytab=hdfs_user_keytab,
+  kinit_path_local=kinit_path_local,
+  bin_dir=hadoop_bin_dir
 )
 
-
-

http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/service_check.py b/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/service_check.py
index abff479..6786eba 100644
--- a/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/service_check.py
+++ b/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/service_check.py
@@ -21,6 +21,7 @@ Ambari Agent
 
 from resource_management import *
 from resource_management.libraries.functions.version import compare_versions
+from resource_management.libraries.functions.dynamic_variable_interpretation import copy_tarballs_to_hdfs
 from ambari_commons import OSConst
 from ambari_commons.os_family_impl import OsFamilyImpl
 
@@ -37,27 +38,53 @@ class TezServiceCheckLinux(TezServiceCheck):
       hdp_version = functions.get_hdp_version("hadoop-client")
 
     path_to_tez_jar = format(params.path_to_tez_examples_jar)
+    copy_test_file_to_hdfs_cmd =  format("fs -put {tmp_dir}/sample-tez-test /tmp/tezsmokeinput/")
+    create_input_dir_cmd = format("fs -mkdir /tmp/tezsmokeinput")
     wordcount_command = format("jar {path_to_tez_jar} orderedwordcount "
                                "/tmp/tezsmokeinput/sample-tez-test /tmp/tezsmokeoutput/")
     test_command = format("fs -test -e /tmp/tezsmokeoutput/_SUCCESS")
-    
+    remove_output_input_dirs_cmd = "fs -rm -r -f /tmp/tezsmokeinput /tmp/tezsmokeoutput"
+
+
+    ExecuteHadoop( remove_output_input_dirs_cmd,
+                   tries = 3,
+                   try_sleep = 5,
+                   user = params.smokeuser,
+                   conf_dir = params.hadoop_conf_dir,
+                   # for kinit run
+                   keytab = params.smoke_user_keytab,
+                   principal = params.smokeuser_principal,
+                   security_enabled = params.security_enabled,
+                   kinit_path_local = params.kinit_path_local,
+                   bin_dir = params.hadoop_bin_dir
+    )
+
+    params.HdfsDirectory("/tmp",
+                         action="create",
+                         owner=params.hdfs_user,
+                         mode=0777
+    )
+
+    ExecuteHadoop( create_input_dir_cmd,
+                   tries = 3,
+                   try_sleep = 5,
+                   user = params.smokeuser,
+                   conf_dir = params.hadoop_conf_dir,
+                   bin_dir = params.hadoop_bin_dir
+    )
+
     File( format("{tmp_dir}/sample-tez-test"),
           content = "foo\nbar\nfoo\nbar\nfoo",
           mode = 0755
     )
-    
-    params.HdfsResource("/tmp/tezsmokeinput",
-                        action="create_on_execute",
-                        type="directory",
-                        owner=params.smokeuser,
-    )
-    params.HdfsResource("/tmp/tezsmokeinput/sample-tez-test",
-                        action="create_on_execute",
-                        type="file",
-                        owner=params.smokeuser,
-                        source=format("{tmp_dir}/sample-tez-test"),
+
+    ExecuteHadoop( copy_test_file_to_hdfs_cmd,
+                   tries = 3,
+                   try_sleep = 5,
+                   user = params.smokeuser,
+                   conf_dir = params.hadoop_conf_dir,
+                   bin_dir = params.hadoop_bin_dir
     )
-    params.HdfsResource(None, action="execute")
 
     ExecuteHadoop( wordcount_command,
                    tries = 3,

http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/historyserver.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/historyserver.py b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/historyserver.py
index e625a0d..f4dceb5 100644
--- a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/historyserver.py
+++ b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/historyserver.py
@@ -21,6 +21,7 @@ Ambari Agent
 
 from resource_management import *
 from resource_management.libraries.functions import conf_select
+from resource_management.libraries.functions.dynamic_variable_interpretation import copy_tarballs_to_hdfs
 from resource_management.libraries.functions.version import compare_versions, format_hdp_stack_version
 from resource_management.libraries.functions.format import format
 from resource_management.libraries.functions.security_commons import build_expectations, \
@@ -72,33 +73,13 @@ class HistoryServerDefault(HistoryServer):
     if params.version and compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0:
       conf_select.select(params.stack_name, "hadoop", params.version)
       Execute(format("hdp-select set hadoop-mapreduce-historyserver {version}"))
-      params.HdfsResource(InlineTemplate(params.mapreduce_tar_destination).get_content(),
-                          type="file",
-                          action="create_on_execute",
-                          source=params.mapreduce_tar_source,
-                          owner=params.hdfs_user,
-                          group=params.user_group,
-                          mode=0444,
-      )
-      params.HdfsResource(None, action="execute")
-
+      copy_tarballs_to_hdfs('mapreduce', 'hadoop-mapreduce-historyserver', params.mapred_user, params.hdfs_user, params.user_group)
 
   def start(self, env, rolling_restart=False):
     import params
     env.set_params(params)
     self.configure(env) # FOR SECURITY
-    
-    if params.version and compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0:
-      params.HdfsResource(InlineTemplate(params.mapreduce_tar_destination).get_content(),
-                          type="file",
-                          action="create_on_execute",
-                          source=params.mapreduce_tar_source,
-                          owner=params.hdfs_user,
-                          group=params.user_group,
-                          mode=0444,
-      )
-      params.HdfsResource(None, action="execute")
-
+    copy_tarballs_to_hdfs('mapreduce', 'hadoop-mapreduce-historyserver', params.mapred_user, params.hdfs_user, params.user_group)
     service('historyserver', action='start', serviceName='mapreduce')
 
   def status(self, env):

http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/install_jars.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/install_jars.py b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/install_jars.py
index 44015bf..a18ca72 100644
--- a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/install_jars.py
+++ b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/install_jars.py
@@ -20,7 +20,6 @@ limitations under the License.
 
 from resource_management import *
 import os
-import glob
 
 def install_tez_jars():
   import params
@@ -30,12 +29,25 @@ def install_tez_jars():
   # If tez libraries are to be stored in hdfs
   if destination_hdfs_dirs:
     for hdfs_dir in destination_hdfs_dirs:
-      params.HdfsResource(hdfs_dir,
-                           type="directory",
-                           action="create_on_execute",
+      params.HdfsDirectory(hdfs_dir,
+                           action="create_delayed",
                            owner=params.tez_user,
                            mode=0755
       )
+    pass
+    params.HdfsDirectory(None, action="create")
+
+    if params.security_enabled:
+      kinit_if_needed = format("{kinit_path_local} -kt {hdfs_user_keytab} {hdfs_principal_name};")
+    else:
+      kinit_if_needed = ""
+
+    if kinit_if_needed:
+      Execute(kinit_if_needed,
+              user=params.tez_user,
+              path='/bin'
+      )
+    pass
 
     app_dir_path = None
     lib_dir_path = None
@@ -50,34 +62,30 @@ def install_tez_jars():
       pass
     pass
 
-    tez_jars = {}
     if app_dir_path:
-      tez_jars[params.tez_local_api_jars] = app_dir_path
+      for scr_file, dest_file in params.app_dir_files.iteritems():
+        CopyFromLocal(scr_file,
+                      mode=0755,
+                      owner=params.tez_user,
+                      dest_dir=app_dir_path,
+                      dest_file=dest_file,
+                      kinnit_if_needed=kinit_if_needed,
+                      hdfs_user=params.hdfs_user,
+                      hadoop_bin_dir=params.hadoop_bin_dir,
+                      hadoop_conf_dir=params.hadoop_conf_dir
+        )
+
     if lib_dir_path:
-      tez_jars[params.tez_local_lib_jars] = lib_dir_path
-
-    for src_file_regex, dest_dir in tez_jars.iteritems():
-      for src_filepath in glob.glob(src_file_regex):
-        src_filename = os.path.basename(src_filepath)
-        params.HdfsResource(format("{dest_dir}/{src_filename}"),
-                            type="file",
-                            action="create_on_execute",
-                            source=src_filepath,
-                            mode=0755,
-                            owner=params.tez_user
-         )
-        
-    for src_file_regex, dest_dir in tez_jars.iteritems():
-      for src_filepath in glob.glob(src_file_regex):
-        src_filename = os.path.basename(src_filepath)
-        params.HdfsResource(format("{dest_dir}/{src_filename}"),
-                            type="file",
-                            action="create_on_execute",
-                            source=src_filepath,
-                            mode=0755,
-                            owner=params.tez_user
-         )
-    params.HdfsResource(None, action="execute")
+      CopyFromLocal(params.tez_local_lib_jars,
+                    mode=0755,
+                    owner=params.tez_user,
+                    dest_dir=lib_dir_path,
+                    kinnit_if_needed=kinit_if_needed,
+                    hdfs_user=params.hdfs_user,
+                    hadoop_bin_dir=params.hadoop_bin_dir,
+                    hadoop_conf_dir=params.hadoop_conf_dir
+      )
+    pass
 
 
 def get_tez_hdfs_dir_paths(tez_lib_uris = None):
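
A note for readers following the revert: the restored HdfsDirectory resource batches its work — every call with action="create_delayed" only queues a path, and the final call with a None name and action="create" flushes the whole queue in one pass. A minimal, runnable sketch of that batching idiom (plain Python 2, a toy class, no Ambari imports):

class DelayedDirectoryQueue(object):
    """Toy stand-in for the create_delayed/create idiom."""
    def __init__(self):
        self._pending = []

    def __call__(self, path, action="create_delayed", **kwargs):
        if action == "create_delayed":
            self._pending.append((path, kwargs))      # just remember the request
        elif action == "create":
            for queued_path, queued_kwargs in self._pending:
                print "mkdir -p %s %r" % (queued_path, queued_kwargs)
            self._pending = []                        # one flush, then reset

HdfsDirectory = DelayedDirectoryQueue()
HdfsDirectory("/apps/tez/lib", action="create_delayed", owner="tez", mode=0755)
HdfsDirectory(None, action="create")                  # single flush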

http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/mapred_service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/mapred_service_check.py b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/mapred_service_check.py
index 35fd71c..6d17aca 100644
--- a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/mapred_service_check.py
+++ b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/mapred_service_check.py
@@ -117,23 +117,34 @@ class MapReduce2ServiceCheckDefault(MapReduce2ServiceCheck):
     input_file = format("/user/{smokeuser}/mapredsmokeinput")
     output_file = format("/user/{smokeuser}/mapredsmokeoutput")
 
+    cleanup_cmd = format("fs -rm -r -f {output_file} {input_file}")
+    create_file_cmd = format("fs -put /etc/passwd {input_file}")
     test_cmd = format("fs -test -e {output_file}")
     run_wordcount_job = format("jar {jar_path} wordcount {input_file} {output_file}")
 
-    params.HdfsResource(input_file,
-                        action = "create_on_execute",
-                        type = "file",
-                        source = "/etc/passwd",
-    )
-    params.HdfsResource(None, action="execute")
-
     if params.security_enabled:
       kinit_cmd = format("{kinit_path_local} -kt {smoke_user_keytab} {smokeuser_principal};")
 
       Execute(kinit_cmd,
               user=params.smokeuser
       )
-      
+
+    ExecuteHadoop(cleanup_cmd,
+                  tries=1,
+                  try_sleep=5,
+                  user=params.smokeuser,
+                  bin_dir=params.execute_path,
+                  conf_dir=params.hadoop_conf_dir
+    )
+
+    ExecuteHadoop(create_file_cmd,
+                  tries=1,
+                  try_sleep=5,
+                  user=params.smokeuser,
+                  bin_dir=params.execute_path,
+                  conf_dir=params.hadoop_conf_dir
+    )
+
     ExecuteHadoop(run_wordcount_job,
                   tries=1,
                   try_sleep=5,
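
The restored service check shells out through plain "hadoop fs" / "hadoop jar" commands instead of queuing HdfsResource operations. A rough, self-contained outline of the same sequence (the subprocess wrapper and the examples-jar path below are assumptions for illustration, not Ambari's ExecuteHadoop):

import subprocess

def hadoop(cmd, user="ambari-qa", conf_dir="/etc/hadoop/conf"):
    # crude stand-in for ExecuteHadoop: run "hadoop --config <conf> <cmd>" as <user>
    full = "su - %s -c 'hadoop --config %s %s'" % (user, conf_dir, cmd)
    return subprocess.call(full, shell=True)

input_file = "/user/ambari-qa/mapredsmokeinput"
output_file = "/user/ambari-qa/mapredsmokeoutput"
jar_path = "/usr/lib/hadoop-mapreduce/hadoop-mapreduce-examples.jar"   # assumed

hadoop("fs -rm -r -f %s %s" % (output_file, input_file))   # cleanup_cmd
hadoop("fs -put /etc/passwd %s" % input_file)              # create_file_cmd
hadoop("jar %s wordcount %s %s" % (jar_path, input_file, output_file))
assert hadoop("fs -test -e %s" % output_file) == 0         # test_cmd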

http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py
index 7437e37..ee454b6 100644
--- a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py
@@ -20,16 +20,13 @@ Ambari Agent
 """
 import os
 
-<<<<<<< HEAD
 from resource_management.libraries.functions import conf_select
-=======
-from resource_management import *
->>>>>>> AMBARI-11034. Implement HdfsResouces and it's usage (aonishuk)
 from resource_management.libraries.functions import format
 from resource_management.libraries.functions import get_kinit_path
 from resource_management.libraries.functions.version import format_hdp_stack_version
 from resource_management.libraries.functions.default import default
 from resource_management.libraries.script.script import Script
+from resource_management.libraries.resources.hdfs_directory import HdfsDirectory
 
 import status_params
 
@@ -55,8 +52,7 @@ stack_name = default("/hostLevelParams/stack_name", None)
 
 # This is expected to be of the form #.#.#.#
 stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
-hdp_stack_version_major = format_hdp_stack_version(stack_version_unformatted)
-hdp_stack_version = version.get_hdp_build_version(hdp_stack_version_major)
+hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)
 
 # New Cluster Stack Version that is defined during the RESTART of a Rolling Upgrade
 version = default("/commandParams/version", None)
@@ -76,6 +72,7 @@ yarn_container_bin = "/usr/lib/hadoop-yarn/bin"
 
 # hadoop parameters for 2.2+
 if Script.is_hdp_stack_greater_or_equal("2.2"):
+
   # MapR directory root
   mapred_role_root = "hadoop-mapreduce-client"
   command_role = default("/role", "")
@@ -97,19 +94,7 @@ if Script.is_hdp_stack_greater_or_equal("2.2"):
   hadoop_yarn_home = format("/usr/hdp/current/{yarn_role_root}")
   yarn_bin = format("/usr/hdp/current/{yarn_role_root}/sbin")
   yarn_container_bin = format("/usr/hdp/current/{yarn_role_root}/bin")
-  
-  mapreduce_tar_source = config['configurations']['cluster-env']['mapreduce_tar_source']
-  mapreduce_tar_destination = config['configurations']['cluster-env']['mapreduce_tar_destination_folder'] + "/" + os.path.basename(mapreduce_tar_source)
-
-<<<<<<< HEAD
-=======
-  # the configuration directory for HDFS/YARN/MapR is the hadoop config
-  # directory, which is symlinked by hadoop-client only
-  hadoop_conf_dir = "/usr/hdp/current/hadoop-client/conf"
-  tez_tar_source = config['configurations']['cluster-env']['tez_tar_source']
-  tez_tar_destination = config['configurations']['cluster-env']['tez_tar_destination_folder'] + "/" + os.path.basename(tez_tar_source)
 
->>>>>>> AMBARI-11034. Implement HdfsResouces and it's usage (aonishuk)
 
 limits_conf_dir = "/etc/security/limits.d"
 execute_path = os.environ['PATH'] + os.pathsep + hadoop_bin_dir + os.pathsep + yarn_container_bin
@@ -236,17 +221,17 @@ tez_lib_uris = default("/configurations/tez-site/tez.lib.uris", None)
 hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
 hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
 import functools
-#create partial functions with common arguments for every HdfsResource call
-#to create/delete hdfs directory/file/copyfromlocal we need to call params.HdfsResource in code
-HdfsResource = functools.partial(
-  HdfsResource,
-  user=hdfs_user,
+# create partial functions with common arguments for every HdfsDirectory call;
+# to create an hdfs directory, call params.HdfsDirectory in code
+HdfsDirectory = functools.partial(
+  HdfsDirectory,
+  conf_dir=hadoop_conf_dir,
+  hdfs_user=hdfs_user,
   security_enabled = security_enabled,
   keytab = hdfs_user_keytab,
   kinit_path_local = kinit_path_local,
-  hadoop_bin_dir = hadoop_bin_dir,
-  hadoop_conf_dir = hadoop_conf_dir
- )
+  bin_dir = hadoop_bin_dir
+)
 update_exclude_file_only = default("/commandParams/update_exclude_file_only",False)
 
 mapred_tt_group = default("/configurations/mapred-site/mapreduce.tasktracker.group", user_group)
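
The pattern in the hunk above is worth spelling out: params_linux.py binds the cluster-wide arguments once with functools.partial, so call sites only pass the per-directory pieces. The same idea in isolation (toy function, runnable as-is under Python 2):

import functools

def hdfs_directory(path, action, hdfs_user, conf_dir, bin_dir, **kwargs):
    print "%s %s as %s (conf=%s, bin=%s) %r" % (action, path, hdfs_user, conf_dir, bin_dir, kwargs)

# bind the arguments every call shares, exactly as the params module does
HdfsDirectory = functools.partial(
    hdfs_directory,
    hdfs_user="hdfs",
    conf_dir="/etc/hadoop/conf",
    bin_dir="/usr/bin",
)

HdfsDirectory("/tmp", action="create_delayed", owner="hdfs", mode=0777)
HdfsDirectory(None, action="create")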

http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/resourcemanager.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/resourcemanager.py b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/resourcemanager.py
index 9840c70..9d5fb97 100644
--- a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/resourcemanager.py
+++ b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/resourcemanager.py
@@ -22,6 +22,7 @@ Ambari Agent
 from resource_management import *
 from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions.version import compare_versions, format_hdp_stack_version
+from resource_management.libraries.functions.dynamic_variable_interpretation import copy_tarballs_to_hdfs
 from resource_management.libraries.functions.security_commons import build_expectations, \
   cached_kinit_executor, get_params_from_filesystem, validate_security_config_properties, \
   FILE_TYPE_XML
@@ -103,18 +104,11 @@ class ResourcemanagerDefault(Resourcemanager):
     self.configure(env) # FOR SECURITY
     if params.is_supported_yarn_ranger:
       setup_ranger_yarn() #Ranger Yarn Plugin related calls 
-    if not Script.is_hdp_stack_greater_or_equal("2.2"):
+    if params.hdp_stack_version != "" and compare_versions(params.hdp_stack_version, '2.1') == 0:
       install_tez_jars()
     else:
       # will work only for stack versions >=2.2
-      params.HdfsResource(InlineTemplate(params.tez_tar_destination).get_content(),
-                          type="file",
-                          action="create_on_execute",
-                          source=params.tez_tar_source,
-                          group=params.user_group,
-                          owner=params.hdfs_user
-      )
-      params.HdfsResource(None, action="execute")
+      copy_tarballs_to_hdfs('tez', 'hadoop-yarn-resourcemanager', params.tez_user, params.hdfs_user, params.user_group)
     service('resourcemanager', action='start')
 
   def status(self, env):
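
The revert also swaps the Script.is_hdp_stack_greater_or_equal("2.2") gate back to an explicit compare_versions check against "2.1". For readers unfamiliar with that helper, here is a self-contained sketch of the kind of dotted-version comparison it performs (my own toy helper, not the resource_management implementation):

def compare_versions(a, b):
    pa = [int(x) for x in a.split(".")]
    pb = [int(x) for x in b.split(".")]
    # pad the shorter list with zeros so "2.1" compares equal to "2.1.0.0"
    length = max(len(pa), len(pb))
    pa += [0] * (length - len(pa))
    pb += [0] * (length - len(pb))
    return cmp(pa, pb)   # Python 2 builtin: -1, 0 or 1

hdp_stack_version = "2.1.0.0"
if hdp_stack_version != "" and compare_versions(hdp_stack_version, "2.1") == 0:
    print "2.1 stack: install local tez jars"
else:
    print "2.2+ stack: copy the tez tarball to HDFS"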

http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/service_check.py b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/service_check.py
index 8e378b5..81da809 100644
--- a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/service_check.py
+++ b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/service_check.py
@@ -84,7 +84,7 @@ class ServiceCheckDefault(ServiceCheck):
     import params
     env.set_params(params)
 
-    if params.hdp_stack_version_major != "" and compare_versions(params.hdp_stack_version_major, '2.2') >= 0:
+    if params.hdp_stack_version != "" and compare_versions(params.hdp_stack_version, '2.2') >= 0:
       path_to_distributed_shell_jar = "/usr/hdp/current/hadoop-yarn-client/hadoop-yarn-applications-distributedshell.jar"
     else:
       path_to_distributed_shell_jar = "/usr/lib/hadoop-yarn/hadoop-yarn-applications-distributedshell*.jar"
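
On pre-2.2 stacks the distributed-shell jar path keeps a wildcard, so whatever consumes it has to expand the glob first. A small illustrative sketch of that expansion (the fallback behaviour is my assumption, not taken from this script):

import glob

pattern = "/usr/lib/hadoop-yarn/hadoop-yarn-applications-distributedshell*.jar"
matches = glob.glob(pattern)
path_to_jar = matches[0] if matches else pattern   # fall back to the raw pattern
print path_to_jar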


[6/8] ambari git commit: Revert "AMBARI-11034. Implement HdfsResouces and it's usage (aonishuk)"

Posted by jo...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/yarn.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/yarn.py b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/yarn.py
index 1e75eb0..583e6e9 100644
--- a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/yarn.py
+++ b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/yarn.py
@@ -61,35 +61,37 @@ def yarn(name = None):
 @OsFamilyFuncImpl(os_family=OsFamilyImpl.DEFAULT)
 def yarn(name = None):
   import params
-  if name == "historyserver":
+  if name in ["nodemanager","historyserver"]:
     if params.yarn_log_aggregation_enabled:
-      params.HdfsResource(params.yarn_nm_app_log_dir,
-                           action="create_on_execute",
-                           type="directory",
+      params.HdfsDirectory(params.yarn_nm_app_log_dir,
+                           action="create_delayed",
                            owner=params.yarn_user,
                            group=params.user_group,
                            mode=0777,
                            recursive_chmod=True
       )
-    params.HdfsResource("/mapred",
-                         type="directory",
-                         action="create_on_execute",
+    params.HdfsDirectory("/mapred",
+                         action="create_delayed",
                          owner=params.mapred_user
     )
-    params.HdfsResource("/mapred/system",
-                         type="directory",
-                         action="create_on_execute",
+    params.HdfsDirectory("/mapred/system",
+                         action="create_delayed",
                          owner=params.hdfs_user
     )
-    params.HdfsResource(params.mapreduce_jobhistory_done_dir,
-                         type="directory",
-                         action="create_on_execute",
+    params.HdfsDirectory(params.mapreduce_jobhistory_intermediate_done_dir,
+                         action="create_delayed",
                          owner=params.mapred_user,
                          group=params.user_group,
-                         change_permissions_for_parents=True,
                          mode=0777
     )
-    params.HdfsResource(None, action="execute")
+
+    params.HdfsDirectory(params.mapreduce_jobhistory_done_dir,
+                         action="create_delayed",
+                         owner=params.mapred_user,
+                         group=params.user_group,
+                         mode=01777
+    )
+    params.HdfsDirectory(None, action="create")
 
   if name == "nodemanager":
     Directory(params.nm_local_dirs.split(',') + params.nm_log_dirs.split(','),
@@ -176,14 +178,12 @@ def yarn(name = None):
        group=params.user_group
     )
     if not is_empty(params.node_label_enable) and params.node_label_enable or is_empty(params.node_label_enable) and params.node_labels_dir:
-      params.HdfsResource(params.node_labels_dir,
-                           type="directory",
-                           action="create_on_execute",
+      params.HdfsDirectory(params.node_labels_dir,
+                           action="create",
                            owner=params.yarn_user,
                            group=params.user_group,
                            mode=0700
       )
-      params.HdfsResource(None, action="execute")
   elif name == 'apptimelineserver':
     Directory(params.ats_leveldb_dir,
        owner=params.yarn_user,
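
One detail in the restored yarn.py deserves a callout: the intermediate done dir is created with mode 0777 while the final jobhistory done dir gets 01777 — the leading 1 is the sticky bit, so users can delete only their own history files in a world-writable directory. A quick local illustration of that distinction (plain Python 2, POSIX only):

import os, stat, tempfile

d = tempfile.mkdtemp()
os.chmod(d, 01777)                            # rwxrwxrwt: world-writable plus sticky
mode = stat.S_IMODE(os.stat(d).st_mode)
print oct(mode)                               # 01777
print bool(mode & stat.S_ISVTX)               # True: sticky bit is set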

http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_master.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_master.py b/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_master.py
index cc404eb..b56d15a 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_master.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_master.py
@@ -280,42 +280,36 @@ class TestHBaseMaster(RMFTestCase):
                               owner='hbase',
                               content='log4jproperties\nline2'
     )
-
-    self.assertResourceCalled('HdfsResource', 'hdfs://c6401.ambari.apache.org:8020/apps/hbase/data',
-        security_enabled = False,
-        hadoop_bin_dir = '/usr/bin',
-        keytab = UnknownConfigurationMock(),
-        
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        owner = 'hbase',
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        type = 'directory',
-        action = ['create_on_execute'],
-    )
-    self.assertResourceCalled('HdfsResource', '/apps/hbase/staging',
-        security_enabled = False,
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        keytab = UnknownConfigurationMock(),
-        
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        owner = 'hbase',
-        hadoop_bin_dir = '/usr/bin',
-        type = 'directory',
-        action = ['create_on_execute'],
-        mode = 0711,
-    )
-    self.assertResourceCalled('HdfsResource', None,
-        security_enabled = False,
-        hadoop_bin_dir = '/usr/bin',
-        keytab = UnknownConfigurationMock(),
-        
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        action = ['execute'],
-        hadoop_conf_dir = '/etc/hadoop/conf',
-    )
+    self.assertResourceCalled('HdfsDirectory', 'hdfs://c6401.ambari.apache.org:8020/apps/hbase/data',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              owner = 'hbase',
+                              bin_dir = '/usr/bin',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/apps/hbase/staging',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              mode = 0711,
+                              owner = 'hbase',
+                              bin_dir = '/usr/bin',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', None,
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              bin_dir = '/usr/bin',
+                              action = ['create'],
+                              )
 
   def assert_configure_secured(self):
     self.assertResourceCalled('Directory', '/etc/hbase',
@@ -407,41 +401,36 @@ class TestHBaseMaster(RMFTestCase):
                               owner='hbase',
                               content='log4jproperties\nline2'
     )
-    self.assertResourceCalled('HdfsResource', 'hdfs://c6401.ambari.apache.org:8020/apps/hbase/data',
-        security_enabled = True,
-        hadoop_bin_dir = '/usr/bin',
-        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-        
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        owner = 'hbase',
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        type = 'directory',
-        action = ['create_on_execute'],
-    )
-    self.assertResourceCalled('HdfsResource', '/apps/hbase/staging',
-        security_enabled = True,
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-        
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        owner = 'hbase',
-        hadoop_bin_dir = '/usr/bin',
-        type = 'directory',
-        action = ['create_on_execute'],
-        mode = 0711,
-    )
-    self.assertResourceCalled('HdfsResource', None,
-        security_enabled = True,
-        hadoop_bin_dir = '/usr/bin',
-        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-        
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        action = ['execute'],
-        hadoop_conf_dir = '/etc/hadoop/conf',
-    )
+    self.assertResourceCalled('HdfsDirectory', 'hdfs://c6401.ambari.apache.org:8020/apps/hbase/data',
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              owner = 'hbase',
+                              bin_dir = '/usr/bin',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/apps/hbase/staging',
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0711,
+                              owner = 'hbase',
+                              bin_dir = '/usr/bin',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', None,
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              bin_dir = '/usr/bin',
+                              action = ['create'],
+                              )
 
   def test_start_default_22(self):
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hbase_master.py",
@@ -539,38 +528,35 @@ class TestHBaseMaster(RMFTestCase):
                               owner='hbase',
                               content='log4jproperties\nline2')
 
-    self.assertResourceCalled('HdfsResource', 'hdfs://nn1/apps/hbase/data',
-        security_enabled = False,
-        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
-        keytab = UnknownConfigurationMock(),
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        owner = 'hbase',
-        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
-        type = 'directory',
-        action = ['create_on_execute'],
-    )
-    self.assertResourceCalled('HdfsResource', '/apps/hbase/staging',
-        security_enabled = False,
-        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
-        keytab = UnknownConfigurationMock(),
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        owner = 'hbase',
-        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
-        type = 'directory',
-        action = ['create_on_execute'],
-        mode = 0711,
-    )
-    self.assertResourceCalled('HdfsResource', None,
-        security_enabled = False,
-        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
-        keytab = UnknownConfigurationMock(),
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        action = ['execute'],
-        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
-    )
+    self.assertResourceCalled('HdfsDirectory', 'hdfs://nn1/apps/hbase/data',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/usr/hdp/current/hadoop-client/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              owner = 'hbase',
+                              bin_dir = '/usr/hdp/current/hadoop-client/bin',
+                              action = ['create_delayed'])
+
+    self.assertResourceCalled('HdfsDirectory', '/apps/hbase/staging',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/usr/hdp/current/hadoop-client/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              mode = 0711,
+                              owner = 'hbase',
+                              bin_dir = '/usr/hdp/current/hadoop-client/bin',
+                              action = ['create_delayed'])
+
+    self.assertResourceCalled('HdfsDirectory', None,
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/usr/hdp/current/hadoop-client/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              bin_dir = '/usr/hdp/current/hadoop-client/bin',
+                              action = ['create'])
 
     self.assertResourceCalled('Execute', '/usr/hdp/current/hbase-master/bin/hbase-daemon.sh --config /usr/hdp/current/hbase-master/conf start master',
       not_if = 'ls /var/run/hbase/hbase-hbase-master.pid >/dev/null 2>&1 && ps -p `cat /var/run/hbase/hbase-hbase-master.pid` >/dev/null 2>&1',
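
These stack tests all follow the same record-and-replay shape: the executed script records every resource it would create, and the test pops them back off with assertResourceCalled in order, finishing with assertNoMoreResources. A stripped-down version of that idea (toy classes, not Ambari's RMFTestCase):

class ResourceRecorder(object):
    def __init__(self):
        self.calls = []

    def record(self, resource_type, name, **kwargs):
        self.calls.append((resource_type, name, kwargs))

    def assert_resource_called(self, resource_type, name, **kwargs):
        actual = self.calls.pop(0)    # enforce ordering by popping from the front
        assert actual == (resource_type, name, kwargs), actual

    def assert_no_more_resources(self):
        assert not self.calls, self.calls

recorder = ResourceRecorder()
recorder.record('HdfsDirectory', '/tmp', owner='hdfs', mode=0777)
recorder.assert_resource_called('HdfsDirectory', '/tmp', owner='hdfs', mode=0777)
recorder.assert_no_more_resources()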

http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_regionserver.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_regionserver.py b/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_regionserver.py
index 3175a01..ea8d9d4 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_regionserver.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_regionserver.py
@@ -207,6 +207,36 @@ class TestHbaseRegionServer(RMFTestCase):
                               owner='hbase',
                               content='log4jproperties\nline2'
     )
+    self.assertResourceCalled('HdfsDirectory', 'hdfs://c6401.ambari.apache.org:8020/apps/hbase/data',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              owner = 'hbase',
+                              bin_dir = '/usr/bin',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/apps/hbase/staging',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0711,
+                              owner = 'hbase',
+                              bin_dir = '/usr/bin',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', None,
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              bin_dir = '/usr/bin',
+                              action = ['create'],
+                              )
 
   def assert_configure_secured(self):
     self.assertResourceCalled('Directory', '/etc/hbase',
@@ -298,6 +328,36 @@ class TestHbaseRegionServer(RMFTestCase):
                               owner='hbase',
                               content='log4jproperties\nline2'
     )
+    self.assertResourceCalled('HdfsDirectory', 'hdfs://c6401.ambari.apache.org:8020/apps/hbase/data',
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              owner = 'hbase',
+                              bin_dir = '/usr/bin',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/apps/hbase/staging',
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0711,
+                              owner = 'hbase',
+                              bin_dir = '/usr/bin',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', None,
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              bin_dir = '/usr/bin',
+                              action = ['create'],
+                              )
 
   def test_start_default_22(self):
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hbase_regionserver.py",
@@ -393,6 +453,37 @@ class TestHbaseRegionServer(RMFTestCase):
                               group='hadoop',
                               owner='hbase',
                               content='log4jproperties\nline2')
+
+    self.assertResourceCalled('HdfsDirectory', 'hdfs://nn1/apps/hbase/data',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/usr/hdp/current/hadoop-client/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              owner = 'hbase',
+                              bin_dir = '/usr/hdp/current/hadoop-client/bin',
+                              action = ['create_delayed'])
+
+    self.assertResourceCalled('HdfsDirectory', '/apps/hbase/staging',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/usr/hdp/current/hadoop-client/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              mode = 0711,
+                              owner = 'hbase',
+                              bin_dir = '/usr/hdp/current/hadoop-client/bin',
+                              action = ['create_delayed'])
+
+    self.assertResourceCalled('HdfsDirectory', None,
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/usr/hdp/current/hadoop-client/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              bin_dir = '/usr/hdp/current/hadoop-client/bin',
+                              action = ['create'])
+
     self.assertResourceCalled('Execute', '/usr/hdp/current/hbase-regionserver/bin/hbase-daemon.sh --config /usr/hdp/current/hbase-regionserver/conf start regionserver',
       not_if = 'ls /var/run/hbase/hbase-hbase-regionserver.pid >/dev/null 2>&1 && ps -p `cat /var/run/hbase/hbase-hbase-regionserver.pid` >/dev/null 2>&1',
       user = 'hbase')

http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py
index 9b7a91f..8b7846c 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py
@@ -380,9 +380,6 @@ class TestDatanode(RMFTestCase):
                               content = Template('slaves.j2'),
                               owner = 'hdfs',
                               )
-    self.assertResourceCalled('File', '/var/lib/ambari-agent/lib/fast-hdfs-resource.jar',
-        content = StaticFile('fast-hdfs-resource.jar'),
-    )
     self.assertResourceCalled('Directory', '/var/lib/hadoop-hdfs',
                               owner = 'hdfs',
                               group = 'hadoop',
@@ -434,9 +431,6 @@ class TestDatanode(RMFTestCase):
                               content = Template('slaves.j2'),
                               owner = 'root',
                               )
-    self.assertResourceCalled('File', '/var/lib/ambari-agent/lib/fast-hdfs-resource.jar',
-        content = StaticFile('fast-hdfs-resource.jar'),
-    )
     self.assertResourceCalled('Directory', '/var/lib/hadoop-hdfs',
                               owner = 'hdfs',
                               group = 'hadoop',

http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_journalnode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_journalnode.py b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_journalnode.py
index d05e2dc..9975b36 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_journalnode.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_journalnode.py
@@ -209,9 +209,6 @@ class TestJournalnode(RMFTestCase):
                               content = Template('slaves.j2'),
                               owner = 'hdfs',
                               )
-    self.assertResourceCalled('File', '/var/lib/ambari-agent/lib/fast-hdfs-resource.jar',
-        content = StaticFile('fast-hdfs-resource.jar'),
-    )
 
   def assert_configure_secured(self):
     self.assertResourceCalled('Directory', '/grid/0/hdfs/journal',
@@ -250,9 +247,6 @@ class TestJournalnode(RMFTestCase):
                               content = Template('slaves.j2'),
                               owner = 'root',
                               )
-    self.assertResourceCalled('File', '/var/lib/ambari-agent/lib/fast-hdfs-resource.jar',
-        content = StaticFile('fast-hdfs-resource.jar'),
-    )
 
 
   @patch('time.sleep')

http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py
index 488e1a2..2e7af32 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py
@@ -100,40 +100,38 @@ class TestNamenode(RMFTestCase):
                               user = 'hdfs',
                               try_sleep = 10,
                               )
-    self.assertResourceCalled('HdfsResource', '/tmp',
-        security_enabled = False,
-        hadoop_bin_dir = '/usr/bin',
-        keytab = UnknownConfigurationMock(),
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        owner = 'hdfs',
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        type = 'directory',
-        action = ['create_on_execute'],
-        mode = 0777,
-    )
-    self.assertResourceCalled('HdfsResource', '/user/ambari-qa',
-        security_enabled = False,
-        hadoop_bin_dir = '/usr/bin',
-        keytab = UnknownConfigurationMock(),
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        owner = 'ambari-qa',
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        type = 'directory',
-        action = ['create_on_execute'],
-        mode = 0770,
-    )
-    self.assertResourceCalled('HdfsResource', None,
-        security_enabled = False,
-        only_if = None,
-        keytab = UnknownConfigurationMock(),
-        hadoop_bin_dir = '/usr/bin',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        action = ['execute'],
-        hadoop_conf_dir = '/etc/hadoop/conf',
-    )
+    self.assertResourceCalled('HdfsDirectory', '/tmp',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0777,
+                              owner = 'hdfs',
+                              bin_dir = '/usr/bin',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/user/ambari-qa',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0770,
+                              owner = 'ambari-qa',
+                              bin_dir = '/usr/bin',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', None,
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              action = ['create'],
+                              bin_dir = '/usr/bin',
+                              only_if = None,
+                              )
     self.assertNoMoreResources()
     pass
 
@@ -207,40 +205,38 @@ class TestNamenode(RMFTestCase):
         user = 'hdfs',
         try_sleep = 10,
     )
-    self.assertResourceCalled('HdfsResource', '/tmp',
-        security_enabled = False,
-        hadoop_bin_dir = '/usr/bin',
-        keytab = UnknownConfigurationMock(),
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        owner = 'hdfs',
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        type = 'directory',
-        action = ['create_on_execute'],
-        mode = 0777,
-    )
-    self.assertResourceCalled('HdfsResource', '/user/ambari-qa',
-        security_enabled = False,
-        hadoop_bin_dir = '/usr/bin',
-        keytab = UnknownConfigurationMock(),
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        owner = 'ambari-qa',
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        type = 'directory',
-        action = ['create_on_execute'],
-        mode = 0770,
-    )
-    self.assertResourceCalled('HdfsResource', None,
-        security_enabled = False,
-        only_if = None,
-        keytab = UnknownConfigurationMock(),
-        hadoop_bin_dir = '/usr/bin',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        action = ['execute'],
-        hadoop_conf_dir = '/etc/hadoop/conf',
-    )
+    self.assertResourceCalled('HdfsDirectory', '/tmp',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0777,
+                              owner = 'hdfs',
+                              bin_dir = '/usr/bin',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/user/ambari-qa',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0770,
+                              owner = 'ambari-qa',
+                              bin_dir = '/usr/bin',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', None,
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              action = ['create'],
+                              bin_dir = '/usr/bin',
+                              only_if = None,
+                              )
     self.assertNoMoreResources()
 
   def test_stop_default(self):
@@ -336,40 +332,38 @@ class TestNamenode(RMFTestCase):
         user = 'hdfs',
         try_sleep = 10,
     )
-    self.assertResourceCalled('HdfsResource', '/tmp',
-        security_enabled = True,
-        hadoop_bin_dir = '/usr/bin',
-        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        owner = 'hdfs',
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        type = 'directory',
-        action = ['create_on_execute'],
-        mode = 0777,
-    )
-    self.assertResourceCalled('HdfsResource', '/user/ambari-qa',
-        security_enabled = True,
-        hadoop_bin_dir = '/usr/bin',
-        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        owner = 'ambari-qa',
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        type = 'directory',
-        action = ['create_on_execute'],
-        mode = 0770,
-    )
-    self.assertResourceCalled('HdfsResource', None,
-        security_enabled = True,
-        only_if = None,
-        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-        hadoop_bin_dir = '/usr/bin',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        action = ['execute'],
-        hadoop_conf_dir = '/etc/hadoop/conf',
-    )
+    self.assertResourceCalled('HdfsDirectory', '/tmp',
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0777,
+                              owner = 'hdfs',
+                              bin_dir = '/usr/bin',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/user/ambari-qa',
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0770,
+                              owner = 'ambari-qa',
+                              bin_dir = '/usr/bin',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', None,
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              action = ['create'],
+                              bin_dir = '/usr/bin',
+                              only_if = None,
+                              )
     self.assertNoMoreResources()
 
   def test_stop_secured(self):
@@ -435,39 +429,37 @@ class TestNamenode(RMFTestCase):
         user = 'hdfs',
         try_sleep = 10,
     )
-    self.assertResourceCalled('HdfsResource', '/tmp',
-        security_enabled = False,
-        hadoop_bin_dir = '/usr/bin',
-        keytab = UnknownConfigurationMock(),
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        owner = 'hdfs',
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        type = 'directory',
-        action = ['create_on_execute'],
-        mode = 0777,
-    )
-    self.assertResourceCalled('HdfsResource', '/user/ambari-qa',
+    self.assertResourceCalled('HdfsDirectory', '/tmp',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0777,
+                              owner = 'hdfs',
+                              bin_dir = '/usr/bin',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/user/ambari-qa',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0770,
+                              owner = 'ambari-qa',
+                              bin_dir = '/usr/bin',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', None,
         security_enabled = False,
-        hadoop_bin_dir = '/usr/bin',
         keytab = UnknownConfigurationMock(),
+        conf_dir = '/etc/hadoop/conf',
+        hdfs_user = 'hdfs',
         kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        owner = 'ambari-qa',
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        type = 'directory',
-        action = ['create_on_execute'],
-        mode = 0770,
-    )
-    self.assertResourceCalled('HdfsResource', None,
-        security_enabled = False,
+        action = ['create'],
+        bin_dir = '/usr/bin',
         only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export  PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn1 | grep active'",
-        keytab = UnknownConfigurationMock(),
-        hadoop_bin_dir = '/usr/bin',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        action = ['execute'],
-        hadoop_conf_dir = '/etc/hadoop/conf',
     )
     self.assertNoMoreResources()
 
@@ -516,39 +508,37 @@ class TestNamenode(RMFTestCase):
         user = 'hdfs',
         try_sleep = 10,
     )
-    self.assertResourceCalled('HdfsResource', '/tmp',
-        security_enabled = True,
-        hadoop_bin_dir = '/usr/bin',
-        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        owner = 'hdfs',
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        type = 'directory',
-        action = ['create_on_execute'],
-        mode = 0777,
-    )
-    self.assertResourceCalled('HdfsResource', '/user/ambari-qa',
+    self.assertResourceCalled('HdfsDirectory', '/tmp',
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0777,
+                              owner = 'hdfs',
+                              bin_dir = '/usr/bin',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/user/ambari-qa',
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0770,
+                              owner = 'ambari-qa',
+                              bin_dir = '/usr/bin',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', None,
         security_enabled = True,
-        hadoop_bin_dir = '/usr/bin',
         keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+        conf_dir = '/etc/hadoop/conf',
+        hdfs_user = 'hdfs',
         kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        owner = 'ambari-qa',
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        type = 'directory',
-        action = ['create_on_execute'],
-        mode = 0770,
-    )
-    self.assertResourceCalled('HdfsResource', None,
-        security_enabled = True,
+        action = ['create'],
+        bin_dir = '/usr/bin',
         only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export  PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn1 | grep active'",
-        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-        hadoop_bin_dir = '/usr/bin',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        action = ['execute'],
-        hadoop_conf_dir = '/etc/hadoop/conf',
     )
     self.assertNoMoreResources()
 
@@ -606,40 +596,38 @@ class TestNamenode(RMFTestCase):
                               user = 'hdfs',
                               try_sleep = 10,
                               )
-    self.assertResourceCalled('HdfsResource', '/tmp',
-        security_enabled = False,
-        hadoop_bin_dir = '/usr/bin',
-        keytab = UnknownConfigurationMock(),
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        owner = 'hdfs',
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        type = 'directory',
-        action = ['create_on_execute'],
-        mode = 0777,
-    )
-    self.assertResourceCalled('HdfsResource', '/user/ambari-qa',
-        security_enabled = False,
-        hadoop_bin_dir = '/usr/bin',
-        keytab = UnknownConfigurationMock(),
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        owner = 'ambari-qa',
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        type = 'directory',
-        action = ['create_on_execute'],
-        mode = 0770,
-    )
-    self.assertResourceCalled('HdfsResource', None,
-        security_enabled = False,
-        only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export  PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn1 | grep active'",
-        keytab = UnknownConfigurationMock(),
-        hadoop_bin_dir = '/usr/bin',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        action = ['execute'],
-        hadoop_conf_dir = '/etc/hadoop/conf',
-    )
+    self.assertResourceCalled('HdfsDirectory', '/tmp',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0777,
+                              owner = 'hdfs',
+                              bin_dir = '/usr/bin',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/user/ambari-qa',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0770,
+                              owner = 'ambari-qa',
+                              bin_dir = '/usr/bin',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', None,
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              action = ['create'],
+                              bin_dir = '/usr/bin',
+                              only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export  PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn1 | grep active'",
+                              )
     self.assertNoMoreResources()
 
   # tests namenode start command when NameNode HA is enabled, and
@@ -693,40 +681,38 @@ class TestNamenode(RMFTestCase):
                               user = 'hdfs',
                               try_sleep = 10,
                               )
-    self.assertResourceCalled('HdfsResource', '/tmp',
-        security_enabled = False,
-        hadoop_bin_dir = '/usr/bin',
-        keytab = UnknownConfigurationMock(),
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        owner = 'hdfs',
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        type = 'directory',
-        action = ['create_on_execute'],
-        mode = 0777,
-    )
-    self.assertResourceCalled('HdfsResource', '/user/ambari-qa',
-        security_enabled = False,
-        hadoop_bin_dir = '/usr/bin',
-        keytab = UnknownConfigurationMock(),
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        owner = 'ambari-qa',
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        type = 'directory',
-        action = ['create_on_execute'],
-        mode = 0770,
-    )
-    self.assertResourceCalled('HdfsResource', None,
-        security_enabled = False,
-        only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export  PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn2 | grep active'",
-        keytab = UnknownConfigurationMock(),
-        hadoop_bin_dir = '/usr/bin',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        action = ['execute'],
-        hadoop_conf_dir = '/etc/hadoop/conf',
-    )
+    self.assertResourceCalled('HdfsDirectory', '/tmp',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0777,
+                              owner = 'hdfs',
+                              bin_dir = '/usr/bin',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/user/ambari-qa',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0770,
+                              owner = 'ambari-qa',
+                              bin_dir = '/usr/bin',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', None,
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              action = ['create'],
+                              bin_dir = '/usr/bin',
+                              only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export  PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn2 | grep active'",
+                              )
     self.assertNoMoreResources()
 
   def test_decommission_default(self):
@@ -843,9 +829,6 @@ class TestNamenode(RMFTestCase):
                               content = Template('slaves.j2'),
                               owner = 'hdfs',
                               )
-    self.assertResourceCalled('File', '/var/lib/ambari-agent/lib/fast-hdfs-resource.jar',
-        content = StaticFile('fast-hdfs-resource.jar'),
-    )
     self.assertResourceCalled('Directory', '/hadoop/hdfs/namenode',
                               owner = 'hdfs',
                               group = 'hadoop',
@@ -885,9 +868,6 @@ class TestNamenode(RMFTestCase):
                               content = Template('slaves.j2'),
                               owner = 'root',
                               )
-    self.assertResourceCalled('File', '/var/lib/ambari-agent/lib/fast-hdfs-resource.jar',
-        content = StaticFile('fast-hdfs-resource.jar'),
-    )
     self.assertResourceCalled('Directory', '/hadoop/hdfs/namenode',
                               owner = 'hdfs',
                               group = 'hadoop',
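
A note on the pattern restored above: the reverted HdfsDirectory resource batches its work in two phases. Each directory is queued with action = ['create_delayed'], and a final call with a None path and action = ['create'] flushes the whole queue (guarded by only_if in the HA tests so it only runs against the active NameNode). The following is a minimal, self-contained sketch of that queue-then-flush idea; the class and method bodies are illustrative, not Ambari's resource_management API.

# Illustrative sketch of HdfsDirectory's create_delayed/create
# batching; not Ambari code. Paths are queued locally and realized
# with as few hadoop CLI round trips as possible on flush.
import subprocess

class DelayedHdfsDirectories(object):
    def __init__(self, bin_dir='/usr/bin', conf_dir='/etc/hadoop/conf'):
        self.hadoop = '%s/hadoop' % bin_dir
        self.conf_dir = conf_dir
        self.queued = []  # (path, mode, owner) tuples

    def create_delayed(self, path, mode=None, owner=None):
        # Queue the directory instead of shelling out immediately.
        self.queued.append((path, mode, owner))

    def create(self):
        # Flush: one mkdir -p covering every queued path, then
        # per-path chmod/chown fix-ups.
        if not self.queued:
            return
        paths = [path for path, _, _ in self.queued]
        self._fs('-mkdir', '-p', *paths)
        for path, mode, owner in self.queued:
            if mode is not None:
                self._fs('-chmod', '%o' % mode, path)
            if owner is not None:
                self._fs('-chown', owner, path)
        self.queued = []

    def _fs(self, *args):
        subprocess.check_call([self.hadoop, '--config', self.conf_dir,
                               'fs'] + list(args))

# Mirrors the assertions above: queue /tmp and /user/ambari-qa,
# then flush both in one create() pass.
dirs = DelayedHdfsDirectories()
dirs.create_delayed('/tmp', mode=0o777, owner='hdfs')
dirs.create_delayed('/user/ambari-qa', mode=0o770, owner='ambari-qa')
# dirs.create()  # would invoke the hadoop CLI; left commented here
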

http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_nfsgateway.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_nfsgateway.py b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_nfsgateway.py
index 9d6d165..d683198 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_nfsgateway.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_nfsgateway.py
@@ -221,9 +221,6 @@ class TestNFSGateway(RMFTestCase):
                               content = Template('slaves.j2'),
                               owner = 'hdfs',
                               )
-    self.assertResourceCalled('File', '/var/lib/ambari-agent/lib/fast-hdfs-resource.jar',
-        content = StaticFile('fast-hdfs-resource.jar'),
-    )
 
   def assert_configure_secured(self):
     self.assertResourceCalled('Directory', '/etc/security/limits.d',
@@ -256,9 +253,6 @@ class TestNFSGateway(RMFTestCase):
                               content = Template('slaves.j2'),
                               owner = 'root',
                               )
-    self.assertResourceCalled('File', '/var/lib/ambari-agent/lib/fast-hdfs-resource.jar',
-        content = StaticFile('fast-hdfs-resource.jar'),
-    )
 
 
   @patch("resource_management.libraries.functions.security_commons.build_expectations")

http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_service_check.py b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_service_check.py
index 36abd40..3c098b6 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_service_check.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_service_check.py
@@ -59,45 +59,36 @@ class TestServiceCheck(RMFTestCase):
         bin_dir = '/usr/bin',
         user = 'hdfs',
     )
-    self.assertResourceCalled('HdfsResource', '/tmp',
-        security_enabled = False,
-        hadoop_bin_dir = '/usr/bin',
-        keytab = UnknownConfigurationMock(),
-        kinit_path_local = '/usr/bin/kinit',
+    self.assertResourceCalled('ExecuteHadoop', 'fs -mkdir /tmp',
+        conf_dir = '/etc/hadoop/conf',
+        logoutput = True,
+        not_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]/usr/bin/hadoop --config /etc/hadoop/conf fs -test -e /tmp'",
+        try_sleep = 3,
+        tries = 5,
+        bin_dir = '/usr/bin',
         user = 'hdfs',
-        action = ['create_on_execute'],
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        type = 'directory',
-        mode = 0777,
     )
-    self.assertResourceCalled('HdfsResource', '/tmp/',
-        security_enabled = False,
-        hadoop_bin_dir = '/usr/bin',
-        keytab = UnknownConfigurationMock(),
-        kinit_path_local = '/usr/bin/kinit',
+    self.assertResourceCalled('ExecuteHadoop', 'fs -chmod 777 /tmp',
+        conf_dir = '/etc/hadoop/conf',
+        logoutput = True,
+        try_sleep = 3,
+        tries = 5,
+        bin_dir = '/usr/bin',
         user = 'hdfs',
-        action = ['delete_on_execute'],
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        type = 'file',
     )
-    self.assertResourceCalled('HdfsResource', '/tmp/',
-        security_enabled = False,
-        hadoop_bin_dir = '/usr/bin',
-        keytab = UnknownConfigurationMock(),
-        kinit_path_local = '/usr/bin/kinit',
-        source = '/etc/passwd',
+    self.assertResourceCalled('ExecuteHadoop', 'fs -rm /tmp/; hadoop --config /etc/hadoop/conf fs -put /etc/passwd /tmp/',
+        logoutput = True,
+        tries = 5,
+        conf_dir = '/etc/hadoop/conf',
+        bin_dir = '/usr/bin',
+        try_sleep = 3,
         user = 'hdfs',
-        action = ['create_on_execute'],
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        type = 'file',
     )
-    self.assertResourceCalled('HdfsResource', None,
-        security_enabled = False,
-        hadoop_bin_dir = '/usr/bin',
-        keytab = UnknownConfigurationMock(),
-        kinit_path_local = '/usr/bin/kinit',
+    self.assertResourceCalled('ExecuteHadoop', 'fs -test -e /tmp/',
+        logoutput = True,
+        tries = 5,
+        conf_dir = '/etc/hadoop/conf',
+        bin_dir = '/usr/bin',
+        try_sleep = 3,
         user = 'hdfs',
-        action = ['execute'],
-        hadoop_conf_dir = '/etc/hadoop/conf',
     )
-    self.assertNoMoreResources()
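
For context on the commands restored above: the revert trades declarative HdfsResource calls for plain ExecuteHadoop shell steps, each made idempotent by a not_if guard and resilient by tries/try_sleep. Below is a rough sketch of that guard-and-retry contract, assuming nothing from Ambari's resource_management library.

# Sketch only; not Ambari's ExecuteHadoop. Skip the command when the
# not_if guard exits 0, otherwise retry up to `tries` times.
import subprocess
import time

def execute_hadoop(cmd, not_if=None, tries=5, try_sleep=3):
    if not_if is not None and subprocess.call(not_if, shell=True) == 0:
        return  # guard passed; nothing to do
    for attempt in range(1, tries + 1):
        if subprocess.call(cmd, shell=True) == 0:
            return
        if attempt < tries:
            time.sleep(try_sleep)
    raise RuntimeError('failed after %d tries: %s' % (tries, cmd))

# Mirrors the first restored assertion: create /tmp only when the
# -test -e probe says it is missing.
# execute_hadoop('hadoop --config /etc/hadoop/conf fs -mkdir /tmp',
#                not_if='hadoop --config /etc/hadoop/conf fs -test -e /tmp')
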

http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_snamenode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_snamenode.py b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_snamenode.py
index 8630fcf..5bedf5b 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_snamenode.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_snamenode.py
@@ -230,9 +230,6 @@ class TestSNamenode(RMFTestCase):
                               content = Template('slaves.j2'),
                               owner = 'hdfs',
                               )
-    self.assertResourceCalled('File', '/var/lib/ambari-agent/lib/fast-hdfs-resource.jar',
-        content = StaticFile('fast-hdfs-resource.jar'),
-    )
     self.assertResourceCalled('Directory', '/hadoop/hdfs/namesecondary',
                               owner = 'hdfs',
                               group = 'hadoop',
@@ -279,9 +276,6 @@ class TestSNamenode(RMFTestCase):
                               content = Template('slaves.j2'),
                               owner = 'root',
                               )
-    self.assertResourceCalled('File', '/var/lib/ambari-agent/lib/fast-hdfs-resource.jar',
-        content = StaticFile('fast-hdfs-resource.jar'),
-    )
     self.assertResourceCalled('Directory', '/hadoop/hdfs/namesecondary',
                               owner = 'hdfs',
                               group = 'hadoop',

http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_zkfc.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_zkfc.py b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_zkfc.py
index e34258b..8e7f34f 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_zkfc.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_zkfc.py
@@ -64,9 +64,6 @@ class TestZkfc(RMFTestCase):
                               content = Template('slaves.j2'),
                               owner = 'hdfs',
                               )
-    self.assertResourceCalled('File', '/var/lib/ambari-agent/lib/fast-hdfs-resource.jar',
-        content = StaticFile('fast-hdfs-resource.jar'),
-    )
     self.assertResourceCalled('Directory', '/var/run/hadoop',
                               owner = 'hdfs',
                               group = 'hadoop',
@@ -158,9 +155,6 @@ class TestZkfc(RMFTestCase):
                               content = Template('slaves.j2'),
                               owner = 'root',
                               )
-    self.assertResourceCalled('File', '/var/lib/ambari-agent/lib/fast-hdfs-resource.jar',
-        content = StaticFile('fast-hdfs-resource.jar'),
-    )
     self.assertResourceCalled('Directory', '/var/run/hadoop',
                               owner = 'hdfs',
                               group = 'hadoop',
@@ -251,9 +245,6 @@ class TestZkfc(RMFTestCase):
                               content = Template('slaves.j2'),
                               owner = 'hdfs',
                               )
-    self.assertResourceCalled('File', '/var/lib/ambari-agent/lib/fast-hdfs-resource.jar',
-        content = StaticFile('fast-hdfs-resource.jar'),
-    )
     self.assertResourceCalled('Directory', '/var/run/hadoop',
                               owner = 'hdfs',
                               group = 'hadoop',
@@ -316,9 +307,6 @@ class TestZkfc(RMFTestCase):
                               content = Template('slaves.j2'),
                               owner = 'hdfs',
                               )
-    self.assertResourceCalled('File', '/var/lib/ambari-agent/lib/fast-hdfs-resource.jar',
-        content = StaticFile('fast-hdfs-resource.jar'),
-    )
     self.assertResourceCalled('Directory', '/var/run/hadoop',
                               owner = 'hdfs',
                               group = 'hadoop',

http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
index 9a98a85..670092d 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
@@ -22,19 +22,16 @@ import socket
 import subprocess
 
 from mock.mock import MagicMock, patch
-from resource_management.libraries.functions import version
 from resource_management.core import shell
-from resource_management.libraries.script.script import Script
+from resource_management.libraries.functions import dynamic_variable_interpretation
 from stacks.utils.RMFTestCase import *
 
-
-@patch.object(version, "get_hdp_build_version", new = MagicMock(return_value="2.0.0.0-1234"))
 @patch("resource_management.libraries.functions.check_thrift_port_sasl", new=MagicMock())
 class TestHiveServer(RMFTestCase):
   COMMON_SERVICES_PACKAGE_DIR = "HIVE/0.12.0.2.0/package"
   STACK_VERSION = "2.0.6"
   UPGRADE_STACK_VERSION = "2.2"
-  @patch.object(Script, "is_hdp_stack_greater_or_equal", new = MagicMock(return_value=False))
+
   def test_configure_default(self):
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_server.py",
                        classname = "HiveServer",
@@ -47,7 +44,7 @@ class TestHiveServer(RMFTestCase):
     self.assertNoMoreResources()
 
   @patch("socket.socket")
-  @patch.object(Script, "is_hdp_stack_greater_or_equal", new = MagicMock(return_value=False))
+  @patch.object(dynamic_variable_interpretation, "copy_tarballs_to_hdfs", new=MagicMock())
   def test_start_default(self, socket_mock):
     s = socket_mock.return_value
 
@@ -86,8 +83,9 @@ class TestHiveServer(RMFTestCase):
                               )
     self.assertNoMoreResources()
 
-  @patch.object(Script, "is_hdp_stack_greater_or_equal", new = MagicMock(return_value=False))
-  def test_start_default_no_copy(self):
+
+  @patch.object(dynamic_variable_interpretation, "_get_tar_source_and_dest_folder")
+  def test_start_default_no_copy(self, get_tar_mock):
 
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_server.py",
                        classname = "HiveServer",
@@ -97,6 +95,7 @@ class TestHiveServer(RMFTestCase):
                        target = RMFTestCase.TARGET_COMMON_SERVICES
     )
 
+    get_tar_mock.return_value = ("a", "b")
     self.assert_configure_default()
 
     self.assertResourceCalled('Execute', 'hive --config /etc/hive/conf.server --service metatool -updateLocation hdfs://c6401.ambari.apache.org:8020 OK.',
@@ -119,9 +118,10 @@ class TestHiveServer(RMFTestCase):
                               timeout = 30,
                               )
     self.assertNoMoreResources()
+    self.assertFalse(get_tar_mock.called)
 
-  @patch.object(Script, "is_hdp_stack_greater_or_equal", new = MagicMock(return_value=False))
-  def test_start_default_alt_tmp(self):
+  @patch.object(dynamic_variable_interpretation, "_get_tar_source_and_dest_folder")
+  def test_start_default_alt_tmp(self, get_tar_mock):
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_server.py",
                        classname = "HiveServer",
                        command = "start",
@@ -130,6 +130,7 @@ class TestHiveServer(RMFTestCase):
                        target = RMFTestCase.TARGET_COMMON_SERVICES
     )
 
+    get_tar_mock.return_value = ("a", "b")
     self.assert_configure_default(no_tmp=True)
 
     self.assertResourceCalled('Execute', 'hive --config /etc/hive/conf.server --service metatool -updateLocation hdfs://c6401.ambari.apache.org:8020 OK.',
@@ -152,10 +153,11 @@ class TestHiveServer(RMFTestCase):
                               timeout = 30,
                               )
     self.assertNoMoreResources()
+    self.assertFalse(get_tar_mock.called)
 
 
-  @patch.object(Script, "is_hdp_stack_greater_or_equal", new = MagicMock(return_value=False))
-  def test_start_default_alt_nn_ha_tmp(self):
+  @patch.object(dynamic_variable_interpretation, "_get_tar_source_and_dest_folder")
+  def test_start_default_alt_nn_ha_tmp(self, get_tar_mock):
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_server.py",
                        classname = "HiveServer",
                        command = "start",
@@ -164,6 +166,7 @@ class TestHiveServer(RMFTestCase):
                        target = RMFTestCase.TARGET_COMMON_SERVICES
     )
 
+    get_tar_mock.return_value = ("a", "b")
     self.assert_configure_default(no_tmp=True)
 
     self.assertResourceCalled('Execute', 'hive --config /etc/hive/conf.server --service metatool -updateLocation hdfs://c6401.ambari.apache.org:8020 OK.',
@@ -186,8 +189,9 @@ class TestHiveServer(RMFTestCase):
                               timeout = 30,
                               )
     self.assertNoMoreResources()
+    self.assertFalse(get_tar_mock.called)
 
-  @patch.object(Script, "is_hdp_stack_greater_or_equal", new = MagicMock(return_value=False))
+  @patch.object(dynamic_variable_interpretation, "copy_tarballs_to_hdfs", new=MagicMock())
   def test_stop_default(self):
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_server.py",
                        classname = "HiveServer",
@@ -213,7 +217,7 @@ class TestHiveServer(RMFTestCase):
     
     self.assertNoMoreResources()
 
-  @patch.object(Script, "is_hdp_stack_greater_or_equal", new = MagicMock(return_value=False))
+    
   def test_configure_secured(self):
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_server.py",
                        classname = "HiveServer",
@@ -227,7 +231,6 @@ class TestHiveServer(RMFTestCase):
 
   @patch("hive_service.check_fs_root")
   @patch("socket.socket")
-  @patch.object(Script, "is_hdp_stack_greater_or_equal", new = MagicMock(return_value=False))
   def test_start_secured(self, socket_mock, check_fs_root_mock):
     s = socket_mock.return_value
 
@@ -273,7 +276,6 @@ class TestHiveServer(RMFTestCase):
 
 
   @patch("socket.socket")
-  @patch.object(Script, "is_hdp_stack_greater_or_equal", new = MagicMock(return_value=False))
   def test_stop_secured(self, socket_mock):
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_server.py",
                        classname = "HiveServer",
@@ -300,89 +302,51 @@ class TestHiveServer(RMFTestCase):
     self.assertNoMoreResources()
 
   def assert_configure_default(self, no_tmp = False):
-    self.assertResourceCalled('HdfsResource', '/apps/webhcat',
-        security_enabled = False,
-        hadoop_bin_dir = '/usr/bin',
-        keytab = UnknownConfigurationMock(),
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        owner = 'hcat',
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        type = 'directory',
-        action = ['create_on_execute'],
-        mode = 0755,
-    )
-    self.assertResourceCalled('HdfsResource', '/user/hcat',
-        security_enabled = False,
-        hadoop_bin_dir = '/usr/bin',
-        keytab = UnknownConfigurationMock(),
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        owner = 'hcat',
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        type = 'directory',
-        action = ['create_on_execute'],
-        mode = 0755,
-    )
-    self.assertResourceCalled('HdfsResource', '/apps/webhcat/hive.tar.gz',
-        security_enabled = False,
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        keytab = UnknownConfigurationMock(),
-        source = '/usr/share/HDP-webhcat/hive.tar.gz',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        action = ['create_on_execute'],
-        group = 'hadoop',
-        hadoop_bin_dir = '/usr/bin',
-        type = 'file',
-        mode = 0755,
-    )
-    self.assertResourceCalled('HdfsResource', '/apps/hive/warehouse',
-        security_enabled = False,
-        hadoop_bin_dir = '/usr/bin',
-        keytab = UnknownConfigurationMock(),
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        owner = 'hive',
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        type = 'directory',
-        action = ['create_on_execute'],
-        mode = 0777,
-    )
-    self.assertResourceCalled('HdfsResource', '/user/hive',
-        security_enabled = False,
-        hadoop_bin_dir = '/usr/bin',
-        keytab = UnknownConfigurationMock(),
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        owner = 'hive',
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        type = 'directory',
-        action = ['create_on_execute'],
-        mode = 0700,
+    self.assertResourceCalled('HdfsDirectory', '/apps/hive/warehouse',
+                              security_enabled=False,
+                              keytab=UnknownConfigurationMock(),
+                              conf_dir='/etc/hadoop/conf',
+                              hdfs_user='hdfs',
+                              kinit_path_local='/usr/bin/kinit',
+                              mode=0777,
+                              owner='hive',
+                              bin_dir='/usr/bin',
+                              action=['create_delayed'],
+    )
+    self.assertResourceCalled('HdfsDirectory', '/user/hive',
+                              security_enabled=False,
+                              keytab=UnknownConfigurationMock(),
+                              conf_dir='/etc/hadoop/conf',
+                              hdfs_user='hdfs',
+                              kinit_path_local='/usr/bin/kinit',
+                              mode=0700,
+                              owner='hive',
+                              bin_dir='/usr/bin',
+                              action=['create_delayed'],
     )
+
     if not no_tmp:
-      self.assertResourceCalled('HdfsResource', '/custompath/tmp/hive',
-          security_enabled = False,
-          hadoop_conf_dir = '/etc/hadoop/conf',
-          keytab = UnknownConfigurationMock(),
-          kinit_path_local = '/usr/bin/kinit',
-          user = 'hdfs',
-          owner = 'hive',
-          group = 'hdfs',
-          hadoop_bin_dir = '/usr/bin',
-          type = 'directory',
-          action = ['create_on_execute'],
-          mode = 0777,
+      self.assertResourceCalled('HdfsDirectory', '/custompath/tmp/hive',
+                                security_enabled=False,
+                                keytab=UnknownConfigurationMock(),
+                                conf_dir='/etc/hadoop/conf',
+                                hdfs_user='hdfs',
+                                kinit_path_local='/usr/bin/kinit',
+                                mode=0777,
+                                owner='hive',
+                                group='hdfs',
+                                action=['create_delayed'],
+                                bin_dir='/usr/bin',
       )
-    self.assertResourceCalled('HdfsResource', None,
-        security_enabled = False,
-        hadoop_bin_dir = '/usr/bin',
-        keytab = UnknownConfigurationMock(),
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        action = ['execute'],
-        hadoop_conf_dir = '/etc/hadoop/conf',
+
+    self.assertResourceCalled('HdfsDirectory', None,
+                              security_enabled=False,
+                              keytab=UnknownConfigurationMock(),
+                              conf_dir='/etc/hadoop/conf',
+                              hdfs_user='hdfs',
+                              kinit_path_local='/usr/bin/kinit',
+                              action=['create'],
+                              bin_dir='/usr/bin',
     )
     self.assertResourceCalled('Directory', '/etc/hive',
                               mode=0755,
@@ -489,88 +453,49 @@ class TestHiveServer(RMFTestCase):
 
 
   def assert_configure_secured(self):
-    self.assertResourceCalled('HdfsResource', '/apps/webhcat',
-        security_enabled = True,
-        hadoop_bin_dir = '/usr/bin',
-        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        owner = 'hcat',
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        type = 'directory',
-        action = ['create_on_execute'],
-        mode = 0755,
-    )
-    self.assertResourceCalled('HdfsResource', '/user/hcat',
-        security_enabled = True,
-        hadoop_bin_dir = '/usr/bin',
-        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        owner = 'hcat',
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        type = 'directory',
-        action = ['create_on_execute'],
-        mode = 0755,
-    )
-    self.assertResourceCalled('HdfsResource', '/apps/webhcat/hive.tar.gz',
-        security_enabled = True,
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-        source = '/usr/share/HDP-webhcat/hive.tar.gz',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        action = ['create_on_execute'],
-        group = 'hadoop',
-        hadoop_bin_dir = '/usr/bin',
-        type = 'file',
-        mode = 0755,
-    )
-    self.assertResourceCalled('HdfsResource', '/apps/hive/warehouse',
-        security_enabled = True,
-        hadoop_bin_dir = '/usr/bin',
-        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        owner = 'hive',
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        type = 'directory',
-        action = ['create_on_execute'],
-        mode = 0777,
-    )
-    self.assertResourceCalled('HdfsResource', '/user/hive',
-        security_enabled = True,
-        hadoop_bin_dir = '/usr/bin',
-        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        owner = 'hive',
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        type = 'directory',
-        action = ['create_on_execute'],
-        mode = 0700,
-    )
-    self.assertResourceCalled('HdfsResource', '/custompath/tmp/hive',
-        security_enabled = True,
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        owner = 'hive',
-        group = 'hdfs',
-        hadoop_bin_dir = '/usr/bin',
-        type = 'directory',
-        action = ['create_on_execute'],
-        mode = 0777,
-    )
-    self.assertResourceCalled('HdfsResource', None,
-        security_enabled = True,
-        hadoop_bin_dir = '/usr/bin',
-        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        action = ['execute'],
-        hadoop_conf_dir = '/etc/hadoop/conf',
+    self.assertResourceCalled('HdfsDirectory', '/apps/hive/warehouse',
+                              security_enabled=True,
+                              keytab='/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir='/etc/hadoop/conf',
+                              hdfs_user='hdfs',
+                              kinit_path_local='/usr/bin/kinit',
+                              mode=0777,
+                              owner='hive',
+                              bin_dir='/usr/bin',
+                              action=['create_delayed'],
+    )
+    self.assertResourceCalled('HdfsDirectory', '/user/hive',
+                              security_enabled=True,
+                              keytab='/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir='/etc/hadoop/conf',
+                              hdfs_user='hdfs',
+                              kinit_path_local='/usr/bin/kinit',
+                              mode=0700,
+                              owner='hive',
+                              bin_dir='/usr/bin',
+                              action=['create_delayed'],
+    )
+    self.assertResourceCalled('HdfsDirectory', '/custompath/tmp/hive',
+                              security_enabled=True,
+                              keytab='/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir='/etc/hadoop/conf',
+                              hdfs_user='hdfs',
+                              kinit_path_local='/usr/bin/kinit',
+                              mode=0777,
+                              owner='hive',
+                              group='hdfs',
+                              action=['create_delayed'],
+                              bin_dir='/usr/bin',
+    )
+
+    self.assertResourceCalled('HdfsDirectory', None,
+                              security_enabled=True,
+                              keytab='/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir='/etc/hadoop/conf',
+                              hdfs_user='hdfs',
+                              kinit_path_local='/usr/bin/kinit',
+                              action=['create'],
+                              bin_dir='/usr/bin',
     )
     self.assertResourceCalled('Directory', '/etc/hive',
                               mode=0755,
@@ -699,7 +624,6 @@ class TestHiveServer(RMFTestCase):
       self.assert_configure_default()
 
 
-  @patch.object(Script, "is_hdp_stack_greater_or_equal", new = MagicMock(return_value=True))
   @patch("hive_server.HiveServer.pre_rolling_restart")
   @patch("hive_server.HiveServer.start")
   def test_stop_during_upgrade(self, hive_server_start_mock,
@@ -847,7 +771,6 @@ class TestHiveServer(RMFTestCase):
     )
     put_structured_out_mock.assert_called_with({"securityState": "UNSECURED"})
 
-  @patch.object(Script, "is_hdp_stack_greater_or_equal", new = MagicMock(return_value=True))
   def test_pre_rolling_restart(self):
     config_file = self.get_src_folder()+"/test/python/stacks/2.0.6/configs/default.json"
     with open(config_file, "r") as f:
@@ -862,32 +785,9 @@ class TestHiveServer(RMFTestCase):
                        target = RMFTestCase.TARGET_COMMON_SERVICES)
     self.assertResourceCalled('Execute',
                               'hdp-select set hive-server2 %s' % version,)
-    self.assertResourceCalled('HdfsResource', 'hdfs:///hdp/apps/2.0.0.0-1234/mapreduce//mapreduce.tar.gz',
-        security_enabled = False,
-        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
-        keytab = UnknownConfigurationMock(),
-        source = '/usr/hdp/current/hadoop-client/mapreduce.tar.gz',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        action = ['create_on_execute'],
-        group = 'hadoop',
-        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
-        type = 'file',
-        mode = 0444,
-    )
-    self.assertResourceCalled('HdfsResource', None,
-        security_enabled = False,
-        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
-        keytab = UnknownConfigurationMock(),
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        action = ['execute'],
-        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
-    )
     self.assertNoMoreResources()
 
   @patch("resource_management.core.shell.call")
-  @patch.object(Script, "is_hdp_stack_greater_or_equal", new = MagicMock(return_value=True))
   def test_pre_rolling_restart_23(self, call_mock):
     config_file = self.get_src_folder()+"/test/python/stacks/2.0.6/configs/default.json"
     with open(config_file, "r") as f:
@@ -907,28 +807,6 @@ class TestHiveServer(RMFTestCase):
 
     self.assertResourceCalled('Execute',
                               'hdp-select set hive-server2 %s' % version,)
-    self.assertResourceCalled('HdfsResource', 'hdfs:///hdp/apps/2.0.0.0-1234/mapreduce//mapreduce.tar.gz',
-        security_enabled = False,
-        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
-        keytab = UnknownConfigurationMock(),
-        source = '/usr/hdp/current/hadoop-client/mapreduce.tar.gz',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        action = ['create_on_execute'],
-        group = 'hadoop',
-        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
-        type = 'file',
-        mode = 0444,
-    )
-    self.assertResourceCalled('HdfsResource', None,
-        security_enabled = False,
-        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
-        keytab = UnknownConfigurationMock(),
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        action = ['execute'],
-        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
-    )
     self.assertNoMoreResources()
 
     self.assertEquals(2, mocks_dict['call'].call_count)
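
One testing detail worth spelling out from the hunks above: the restored decorators use patch.object to swap a module attribute for a MagicMock for the duration of a single test, and the mock arrives as an extra positional argument (get_tar_mock) whose return_value the test sets and whose called flag it inspects. The following self-contained illustration shows the pattern; the module and helper names are stand-ins rather than Ambari code, and it uses unittest.mock where the tests above use the standalone mock package.

import types
import unittest
from unittest.mock import patch

# Stand-in for the module the Hive tests patch.
tarball_utils = types.ModuleType('tarball_utils')
def _get_tar_source_and_dest_folder(name):
    raise AssertionError('real helper must not run under test')
tarball_utils._get_tar_source_and_dest_folder = _get_tar_source_and_dest_folder

class TestPatchObject(unittest.TestCase):
    @patch.object(tarball_utils, '_get_tar_source_and_dest_folder')
    def test_no_copy(self, get_tar_mock):
        # The decorator installs a MagicMock for this test only and
        # passes it in as the extra argument.
        get_tar_mock.return_value = ('a', 'b')
        # ...exercise code that should not touch the helper...
        self.assertFalse(get_tar_mock.called)

if __name__ == '__main__':
    unittest.main()
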

http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_service_check.py b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_service_check.py
index 86d348f..7bbd0a9 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_service_check.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_service_check.py
@@ -24,13 +24,12 @@ import datetime, sys, socket
 import  resource_management.libraries.functions
 @patch.object(resource_management.libraries.functions, "get_unique_id_and_date", new = MagicMock(return_value=''))
 @patch("socket.socket")
-@patch("time.time", new=MagicMock(return_value=1431110511.43))
 class TestServiceCheck(RMFTestCase):
   COMMON_SERVICES_PACKAGE_DIR = "HIVE/0.12.0.2.0/package"
   STACK_VERSION = "2.0.6"
 
-
-  def test_service_check_default(self, socket_mock):
+  @patch("sys.exit")
+  def test_service_check_default(self, sys_exit_mock, socket_mock):
 
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/service_check.py",
                         classname="HiveServiceCheck",
@@ -83,52 +82,16 @@ class TestServiceCheck(RMFTestCase):
                               content = StaticFile('templetonSmoke.sh'),
                               mode = 0755,
                               )
-    self.assertResourceCalled('File', '/tmp/idtest.ambari-qa.1431110511.43.pig',
-        content = Template('templeton_smoke.pig.j2', templeton_test_input='/tmp/idtest.ambari-qa.1431110511.43.in', templeton_test_output='/tmp/idtest.ambari-qa.1431110511.43.out'),
-    )
-    self.assertResourceCalled('HdfsResource', '/tmp/idtest.ambari-qa.1431110511.43.pig',
-        security_enabled = False,
-        hadoop_bin_dir = '/usr/bin',
-        keytab = UnknownConfigurationMock(),
-        kinit_path_local = '/usr/bin/kinit',
-        source = '/tmp/idtest.ambari-qa.1431110511.43.pig',
-        user = 'hdfs',
-        owner = 'ambari-qa',
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        type = 'file',
-        action = ['create_on_execute'],
-    )
-    self.assertResourceCalled('HdfsResource', '/tmp/idtest.ambari-qa.1431110511.43.in',
-        security_enabled = False,
-        hadoop_bin_dir = '/usr/bin',
-        keytab = UnknownConfigurationMock(),
-        kinit_path_local = '/usr/bin/kinit',
-        source = '/etc/passwd',
-        user = 'hdfs',
-        owner = 'ambari-qa',
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        type = 'file',
-        action = ['create_on_execute'],
-    )
-    self.assertResourceCalled('HdfsResource', None,
-        security_enabled = False,
-        hadoop_bin_dir = '/usr/bin',
-        keytab = UnknownConfigurationMock(),
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        action = ['execute'],
-        hadoop_conf_dir = '/etc/hadoop/conf',
-    )
-    self.assertResourceCalled('Execute', '/tmp/templetonSmoke.sh c6402.ambari.apache.org ambari-qa 50111 idtest.ambari-qa.1431110511.43.pig no_keytab false /usr/bin/kinit no_principal',
-        logoutput = True,
-        path = ['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'],
-        tries = 3,
-        try_sleep = 5,
-    )
+    self.assertResourceCalled('Execute', '/tmp/templetonSmoke.sh c6402.ambari.apache.org ambari-qa 50111 no_keytab false /usr/bin/kinit no_principal',
+                              logoutput = True,
+                              path = ['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'],
+                              tries = 3,
+                              try_sleep = 5,
+                              )
     self.assertNoMoreResources()
 
-
-  def test_service_check_secured(self, socket_mock):
+  @patch("sys.exit")
+  def test_service_check_secured(self, sys_exit_mock, socket_mock):
 
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/service_check.py",
                         classname="HiveServiceCheck",
@@ -182,47 +145,10 @@ class TestServiceCheck(RMFTestCase):
                               content = StaticFile('templetonSmoke.sh'),
                               mode = 0755,
                               )
-    
-    self.assertResourceCalled('File', '/tmp/idtest.ambari-qa.1431110511.43.pig',
-        content = Template('templeton_smoke.pig.j2', templeton_test_input='/tmp/idtest.ambari-qa.1431110511.43.in', templeton_test_output='/tmp/idtest.ambari-qa.1431110511.43.out'),
-    )
-    self.assertResourceCalled('HdfsResource', '/tmp/idtest.ambari-qa.1431110511.43.pig',
-        action = ['create_on_execute'],
-        security_enabled = True,
-        hadoop_bin_dir = '/usr/bin',
-        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-        kinit_path_local = '/usr/bin/kinit',
-        source = '/tmp/idtest.ambari-qa.1431110511.43.pig',
-        user = 'hdfs',
-        owner = 'ambari-qa',
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        type = 'file',
-    )
-    self.assertResourceCalled('HdfsResource', '/tmp/idtest.ambari-qa.1431110511.43.in',
-        action = ['create_on_execute'],
-        security_enabled = True,
-        hadoop_bin_dir = '/usr/bin',
-        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-        kinit_path_local = '/usr/bin/kinit',
-        source = '/etc/passwd',
-        user = 'hdfs',
-        owner = 'ambari-qa',
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        type = 'file',
-    )
-    self.assertResourceCalled('HdfsResource', None,
-        security_enabled = True,
-        hadoop_bin_dir = '/usr/bin',
-        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        action = ['execute'],
-        hadoop_conf_dir = '/etc/hadoop/conf',
-    )
-    self.assertResourceCalled('Execute', '/tmp/templetonSmoke.sh c6402.ambari.apache.org ambari-qa 50111 idtest.ambari-qa.1431110511.43.pig /etc/security/keytabs/smokeuser.headless.keytab true /usr/bin/kinit ambari-qa@EXAMPLE.COM',
-        logoutput = True,
-        path = ['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'],
-        tries = 3,
-        try_sleep = 5,
-    )
+    self.assertResourceCalled('Execute', '/tmp/templetonSmoke.sh c6402.ambari.apache.org ambari-qa 50111 /etc/security/keytabs/smokeuser.headless.keytab true /usr/bin/kinit ambari-qa@EXAMPLE.COM',
+                              logoutput = True,
+                              path = ['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'],
+                              tries = 3,
+                              try_sleep = 5,
+                              )
     self.assertNoMoreResources()
\ No newline at end of file
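
The decorator removed at the top of this file, @patch("time.time", new=MagicMock(return_value=1431110511.43)), is what made the idtest.ambari-qa.1431110511.43.* names deterministic in the deleted assertions. The trick generalizes to any timestamp-derived name; here is a small sketch, where make_test_file_name is an invented helper for illustration.

import time
from unittest.mock import MagicMock, patch

def make_test_file_name(user='ambari-qa'):
    # Hypothetical helper: embeds the current time in the name,
    # like the idtest.* files in the assertions above.
    return '/tmp/idtest.%s.%s.pig' % (user, time.time())

# Pinning time.time makes the generated name assertable.
with patch('time.time', new=MagicMock(return_value=1431110511.43)):
    assert make_test_file_name() == '/tmp/idtest.ambari-qa.1431110511.43.pig'
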


http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_webhcat_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_webhcat_server.py b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_webhcat_server.py
index f2d7edb..3fa1c9c 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_webhcat_server.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_webhcat_server.py
@@ -117,6 +117,37 @@ class TestWebHCatServer(RMFTestCase):
     self.assertNoMoreResources()
 
   def assert_configure_default(self):
+    self.assertResourceCalled('HdfsDirectory', '/apps/webhcat',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              mode = 0755,
+                              owner = 'hcat',
+                              bin_dir = '/usr/bin',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/user/hcat',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              mode = 0755,
+                              owner = 'hcat',
+                              bin_dir = '/usr/bin',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', None,
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              bin_dir = '/usr/bin',
+                              action = ['create'],
+                              )
     self.assertResourceCalled('Directory', '/var/run/webhcat',
                               owner = 'hcat',
                               group = 'hadoop',
@@ -134,6 +165,42 @@ class TestWebHCatServer(RMFTestCase):
                               group = 'hadoop',
                               recursive = True,
                               )
+    self.assertResourceCalled('CopyFromLocal', '/usr/lib/hadoop-mapreduce/hadoop-streaming-*.jar',
+                              owner='hcat',
+                              mode=0755,
+                              dest_dir='/apps/webhcat',
+                              kinnit_if_needed='',
+                              hadoop_conf_dir='/etc/hadoop/conf',
+                              hadoop_bin_dir='/usr/bin',
+                              hdfs_user='hdfs'
+    )
+    self.assertResourceCalled('CopyFromLocal', '/usr/share/HDP-webhcat/pig.tar.gz',
+                              owner='hcat',
+                              mode=0755,
+                              dest_dir='/apps/webhcat',
+                              kinnit_if_needed='',
+                              hadoop_conf_dir='/etc/hadoop/conf',
+                              hadoop_bin_dir='/usr/bin',
+                              hdfs_user='hdfs'
+    )
+    self.assertResourceCalled('CopyFromLocal', '/usr/share/HDP-webhcat/hive.tar.gz',
+                              owner='hcat',
+                              mode=0755,
+                              dest_dir='/apps/webhcat',
+                              kinnit_if_needed='',
+                              hadoop_bin_dir='/usr/bin',
+                              hadoop_conf_dir='/etc/hadoop/conf',
+                              hdfs_user='hdfs'
+    )
+    self.assertResourceCalled('CopyFromLocal', '/usr/share/HDP-webhcat/sqoop*.tar.gz',
+                              owner='hcat',
+                              mode=0755,
+                              dest_dir='/apps/webhcat',
+                              kinnit_if_needed='',
+                              hadoop_bin_dir='/usr/bin',
+                              hadoop_conf_dir='/etc/hadoop/conf',
+                              hdfs_user='hdfs'
+    )
     self.assertResourceCalled('XmlConfig', 'webhcat-site.xml',
                               owner = 'hcat',
                               group = 'hadoop',
@@ -158,6 +225,37 @@ class TestWebHCatServer(RMFTestCase):
                               )
 
   def assert_configure_secured(self):
+    self.assertResourceCalled('HdfsDirectory', '/apps/webhcat',
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0755,
+                              owner = 'hcat',
+                              bin_dir = '/usr/bin',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/user/hcat',
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0755,
+                              owner = 'hcat',
+                              bin_dir = '/usr/bin',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', None,
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              bin_dir = '/usr/bin',
+                              action = ['create'],
+                              )
     self.assertResourceCalled('Directory', '/var/run/webhcat',
                               owner = 'hcat',
                               group = 'hadoop',
@@ -179,6 +277,42 @@ class TestWebHCatServer(RMFTestCase):
                               path = ['/bin'],
                               user = 'hcat',
                               )
+    self.assertResourceCalled('CopyFromLocal', '/usr/lib/hadoop-mapreduce/hadoop-streaming-*.jar',
+                              owner='hcat',
+                              mode=0755,
+                              dest_dir='/apps/webhcat',
+                              kinnit_if_needed='/usr/bin/kinit -kt /etc/security/keytabs/hdfs.headless.keytab hdfs;',
+                              hadoop_conf_dir='/etc/hadoop/conf',
+                              hadoop_bin_dir='/usr/bin',
+                              hdfs_user='hdfs'
+    )
+    self.assertResourceCalled('CopyFromLocal', '/usr/share/HDP-webhcat/pig.tar.gz',
+                              owner='hcat',
+                              mode=0755,
+                              dest_dir='/apps/webhcat',
+                              kinnit_if_needed='/usr/bin/kinit -kt /etc/security/keytabs/hdfs.headless.keytab hdfs;',
+                              hadoop_conf_dir='/etc/hadoop/conf',
+                              hadoop_bin_dir='/usr/bin',
+                              hdfs_user='hdfs'
+    )
+    self.assertResourceCalled('CopyFromLocal', '/usr/share/HDP-webhcat/hive.tar.gz',
+                              owner='hcat',
+                              mode=0755,
+                              dest_dir='/apps/webhcat',
+                              kinnit_if_needed='/usr/bin/kinit -kt /etc/security/keytabs/hdfs.headless.keytab hdfs;',
+                              hadoop_conf_dir='/etc/hadoop/conf',
+                              hadoop_bin_dir='/usr/bin',
+                              hdfs_user='hdfs'
+    )
+    self.assertResourceCalled('CopyFromLocal', '/usr/share/HDP-webhcat/sqoop*.tar.gz',
+                              owner='hcat',
+                              mode=0755,
+                              dest_dir='/apps/webhcat',
+                              kinnit_if_needed='/usr/bin/kinit -kt /etc/security/keytabs/hdfs.headless.keytab hdfs;',
+                              hadoop_conf_dir='/etc/hadoop/conf',
+                              hadoop_bin_dir='/usr/bin',
+                              hdfs_user='hdfs'
+    )
     self.assertResourceCalled('XmlConfig', 'webhcat-site.xml',
                               owner = 'hcat',
                               group = 'hadoop',
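
On the kinnit_if_needed argument restored above (the parameter really is spelled that way in the source): it is a shell prefix, empty on unsecured clusters and a "kinit -kt <keytab> <user>;" command when security is on, so callers can prepend it unconditionally. A sketch of composing it, outside any Ambari API:

# Illustrative only: build the conditional kinit prefix passed as
# kinnit_if_needed in the CopyFromLocal assertions above.
def kinit_prefix(security_enabled, kinit_path, keytab, user):
    if not security_enabled:
        return ''  # safe to prepend to any command
    return '%s -kt %s %s;' % (kinit_path, keytab, user)

assert kinit_prefix(False, '/usr/bin/kinit', None, 'hdfs') == ''
assert kinit_prefix(
    True, '/usr/bin/kinit',
    '/etc/security/keytabs/hdfs.headless.keytab', 'hdfs',
) == '/usr/bin/kinit -kt /etc/security/keytabs/hdfs.headless.keytab hdfs;'
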

http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_server.py b/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_server.py
index b7f8209..d0a71c0 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_server.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_server.py
@@ -50,27 +50,17 @@ class TestOozieServer(RMFTestCase):
                        hdp_stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES
     )
-    self.assertResourceCalled('HdfsResource', '/user/oozie',
-        security_enabled = False,
-        hadoop_bin_dir = '/usr/bin',
-        keytab = UnknownConfigurationMock(),
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        owner = 'oozie',
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        type = 'directory',
-        action = ['create_on_execute'],
-        mode = 0775,
-    )
-    self.assertResourceCalled('HdfsResource', None,
-        security_enabled = False,
-        hadoop_bin_dir = '/usr/bin',
-        keytab = UnknownConfigurationMock(),
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        action = ['execute'],
-        hadoop_conf_dir = '/etc/hadoop/conf',
-    )
+    self.assertResourceCalled('HdfsDirectory', '/user/oozie',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0775,
+                              owner = 'oozie',
+                              bin_dir = '/usr/bin',
+                              action = ['create'],
+                              )
     self.assertResourceCalled('Directory', '/etc/oozie/conf',
                               owner = 'oozie',
                               group = 'hadoop',
@@ -335,26 +325,16 @@ class TestOozieServer(RMFTestCase):
 
 
   def assert_configure_default(self):
-    self.assertResourceCalled('HdfsResource', '/user/oozie',
-        security_enabled = False,
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        keytab = UnknownConfigurationMock(),
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        owner = 'oozie',
-        hadoop_bin_dir = '/usr/bin',
-        type = 'directory',
-        action = ['create_on_execute'],
-        mode = 0775,
-    )
-    self.assertResourceCalled('HdfsResource', None,
-        security_enabled = False,
-        hadoop_bin_dir = '/usr/bin',
-        keytab = UnknownConfigurationMock(),
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        action = ['execute'],
-        hadoop_conf_dir = '/etc/hadoop/conf',
+    self.assertResourceCalled('HdfsDirectory', '/user/oozie',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0775,
+                              owner = 'oozie',
+                              bin_dir = '/usr/bin',
+                              action = ['create'],
     )
     self.assertResourceCalled('Directory', '/etc/oozie/conf',
                               owner = 'oozie',
@@ -501,29 +481,17 @@ class TestOozieServer(RMFTestCase):
 
 
   def assert_configure_secured(self):
-    self.assertResourceCalled('HdfsResource', '/user/oozie',
-        security_enabled = True,
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-        
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        owner = 'oozie',
-        hadoop_bin_dir = '/usr/bin',
-        type = 'directory',
-        action = ['create_on_execute'],
-        mode = 0775,
-    )
-    self.assertResourceCalled('HdfsResource', None,
-        security_enabled = True,
-        hadoop_bin_dir = '/usr/bin',
-        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-        
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        action = ['execute'],
-        hadoop_conf_dir = '/etc/hadoop/conf',
-    )
+    self.assertResourceCalled('HdfsDirectory', '/user/oozie',
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0775,
+                              owner = 'oozie',
+                              bin_dir = '/usr/bin',
+                              action = ['create'],
+                              )
     self.assertResourceCalled('Directory', '/etc/oozie/conf',
                               owner = 'oozie',
                               group = 'hadoop',
@@ -861,30 +829,9 @@ class TestOozieServer(RMFTestCase):
     self.assertEqual(glob_mock.call_count,1)
     glob_mock.assert_called_with('/usr/hdp/2.2.1.0-2135/hadoop/lib/hadoop-lzo*.jar')
 
-    self.assertResourceCalled('Execute', 'hdp-select set oozie-server 2.2.1.0-2135',)
-    self.assertResourceCalled('HdfsResource', '/user/oozie/share',
-        security_enabled = False,
-        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
-        keytab = UnknownConfigurationMock(),
-        user = 'hdfs',
-        kinit_path_local = '/usr/bin/kinit',
-        recursive_chmod = True,
-        owner = 'oozie',
-        group = 'hadoop',
-        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
-        type = 'directory',
-        action = ['create_on_execute'],
-        mode = 0755,
-    )
-    self.assertResourceCalled('HdfsResource', None,
-        security_enabled = False,
-        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
-        keytab = UnknownConfigurationMock(),
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        action = ['execute'],
-        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
-    )
+    self.assertResourceCalled('Execute', 'hdp-select set oozie-server 2.2.1.0-2135')
+    self.assertResourceCalled('Execute', 'hdfs dfs -chown oozie:hadoop /user/oozie/share', user='oozie')
+    self.assertResourceCalled('Execute', 'hdfs dfs -chmod -R 755 /user/oozie/share', user='oozie')
     self.assertResourceCalled('Execute', '/usr/hdp/current/oozie-server/bin/ooziedb.sh upgrade -run', user='oozie')
     self.assertResourceCalled('Execute', '/usr/hdp/current/oozie-server/bin/oozie-setup.sh sharelib create -fs hdfs://c6401.ambari.apache.org:8020', user='oozie')
 
@@ -943,32 +890,9 @@ class TestOozieServer(RMFTestCase):
     glob_mock.assert_called_with('/usr/hdp/2.3.0.0-1234/hadoop/lib/hadoop-lzo*.jar')
 
     self.assertResourceCalled('Execute', 'hdp-select set oozie-server 2.3.0.0-1234')
-    self.assertResourceCalled('HdfsResource', '/user/oozie/share',
-        security_enabled = False,
-        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
-        keytab = UnknownConfigurationMock(),
-        user = 'hdfs',
-        kinit_path_local = '/usr/bin/kinit',
-        recursive_chmod = True,
-        owner = 'oozie',
-        group = 'hadoop',
-        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
-        type = 'directory',
-        action = ['create_on_execute'],
-        mode = 0755,
-    )
-    self.assertResourceCalled('HdfsResource', None,
-        security_enabled = False,
-        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
-        keytab = UnknownConfigurationMock(),
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        action = ['execute'],
-        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
-    )
-    self.assertResourceCalled('Execute', '/usr/hdp/current/oozie-server/bin/ooziedb.sh upgrade -run',
-        user = 'oozie',
-    )
+    self.assertResourceCalled('Execute', 'hdfs dfs -chown oozie:hadoop /user/oozie/share', user='oozie')
+    self.assertResourceCalled('Execute', 'hdfs dfs -chmod -R 755 /user/oozie/share', user='oozie')
+    self.assertResourceCalled('Execute', '/usr/hdp/current/oozie-server/bin/ooziedb.sh upgrade -run', user='oozie')
     self.assertResourceCalled('Execute', '/usr/hdp/current/oozie-server/bin/oozie-setup.sh sharelib create -fs hdfs://c6401.ambari.apache.org:8020', user='oozie')
 
     self.assertNoMoreResources()
@@ -1019,29 +943,8 @@ class TestOozieServer(RMFTestCase):
     isfile_mock.assert_called_with('/usr/share/HDP-oozie/ext-2.2.zip')
 
     self.assertResourceCalled('Execute', 'hdp-select set oozie-server 2.2.0.0-0000')
-    self.assertResourceCalled('HdfsResource', '/user/oozie/share',
-        security_enabled = False,
-        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
-        keytab = UnknownConfigurationMock(),
-        user = 'hdfs',
-        kinit_path_local = '/usr/bin/kinit',
-        recursive_chmod = True,
-        owner = 'oozie',
-        group = 'hadoop',
-        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
-        type = 'directory',
-        action = ['create_on_execute'],
-        mode = 0755,
-    )
-    self.assertResourceCalled('HdfsResource', None,
-        security_enabled = False,
-        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
-        keytab = UnknownConfigurationMock(),
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        action = ['execute'],
-        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
-    )
+    self.assertResourceCalled('Execute', 'hdfs dfs -chown oozie:hadoop /user/oozie/share', user='oozie')
+    self.assertResourceCalled('Execute', 'hdfs dfs -chmod -R 755 /user/oozie/share', user='oozie')
     self.assertResourceCalled('Execute', '/usr/hdp/current/oozie-server/bin/ooziedb.sh upgrade -run', user='oozie')
     self.assertResourceCalled('Execute', '/usr/hdp/current/oozie-server/bin/oozie-setup.sh sharelib create -fs hdfs://c6401.ambari.apache.org:8020', user='oozie')
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_service_check.py b/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_service_check.py
index 151ce14..bcd5ad9 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_service_check.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_service_check.py
@@ -21,7 +21,6 @@ from stacks.utils.RMFTestCase import *
 import resource_management.libraries.functions
 from mock.mock import MagicMock, call, patch
 
-@patch("glob.glob", new = MagicMock(return_value="/usr/something/oozie-client/lib"))
 @patch("platform.linux_distribution", new = MagicMock(return_value="Linux"))
 class TestServiceCheck(RMFTestCase):
   COMMON_SERVICES_PACKAGE_DIR = "OOZIE/4.0.0.2.0/package"
@@ -56,50 +55,10 @@ class TestServiceCheck(RMFTestCase):
         content = StaticFile('oozieSmoke2.sh'),
         mode = 0755,
     )
-    self.assertResourceCalled('File', '/tmp/prepareOozieHdfsDirectories.sh',
-        content = StaticFile('prepareOozieHdfsDirectories.sh'),
-        mode = 0755,
-    )
-    self.assertResourceCalled('Execute', '/tmp/prepareOozieHdfsDirectories.sh /etc/oozie/conf / /etc/hadoop/conf ',
+    self.assertResourceCalled('Execute', '/tmp/oozieSmoke2.sh',
         logoutput = True,
         tries = 3,
-        try_sleep = 5,
-    )
-    self.assertResourceCalled('HdfsResource', '/user/ambari-qa/examples',
-        security_enabled = False,
-        hadoop_bin_dir = '/usr/bin',
-        keytab = UnknownConfigurationMock(),
-        kinit_path_local = '/usr/bin/kinit',
-        source = '//examples',
-        user = 'hdfs',
-        action = ['create_on_execute'],
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        type = 'directory',
-    )
-    self.assertResourceCalled('HdfsResource', '/user/ambari-qa/input-data',
-        security_enabled = False,
-        hadoop_bin_dir = '/usr/bin',
-        keytab = UnknownConfigurationMock(),
-        kinit_path_local = '/usr/bin/kinit',
-        source = '//examples/input-data',
-        user = 'hdfs',
-        action = ['create_on_execute'],
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        type = 'directory',
-    )
-    self.assertResourceCalled('HdfsResource', None,
-        security_enabled = False,
-        hadoop_bin_dir = '/usr/bin',
-        keytab = UnknownConfigurationMock(),
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        action = ['execute'],
-        hadoop_conf_dir = '/etc/hadoop/conf',
-    )
-    self.assertResourceCalled('Execute', '/tmp/oozieSmoke2.sh suse /var/lib/oozie /etc/oozie/conf /usr/bin / /etc/hadoop/conf /usr/bin ambari-qa False',
-        logoutput = True,
+        command = '/tmp/oozieSmoke2.sh suse /var/lib/oozie /etc/oozie/conf /usr/bin /etc/hadoop/conf /usr/bin ambari-qa False',
         path = ['/usr/bin:/usr/bin'],
-        tries = 3,
         try_sleep = 5,
     )
-
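
After the revert the Oozie service check is again a single retried shell invocation instead of a prepare script plus a batch of HdfsResource declarations. The tries/try_sleep pair asserted above is the library's built-in retry loop; a hedged sketch of the equivalent call, with the argument string copied from the assertion:

    from resource_management.core.resources.system import Execute

    # Run the smoke script up to 3 times, sleeping 5s between attempts;
    # logoutput streams the script's output into the agent log.
    Execute('/tmp/oozieSmoke2.sh',
            command='/tmp/oozieSmoke2.sh suse /var/lib/oozie /etc/oozie/conf '
                    '/usr/bin /etc/hadoop/conf /usr/bin ambari-qa False',
            path=['/usr/bin:/usr/bin'],
            logoutput=True,
            tries=3,
            try_sleep=5)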

http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/test/python/stacks/2.0.6/PIG/test_pig_service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/PIG/test_pig_service_check.py b/ambari-server/src/test/python/stacks/2.0.6/PIG/test_pig_service_check.py
index 79e3697..df30085 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/PIG/test_pig_service_check.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/PIG/test_pig_service_check.py
@@ -4,7 +4,7 @@
 Licensed to the Apache Software Foundation (ASF) under one
 or more contributor license agreements.  See the NOTICE file
 distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file`
+regarding copyright ownership.  The ASF licenses this file
 to you under the Apache License, Version 2.0 (the
 "License"); you may not use this file except in compliance
 with the License.  You may obtain a copy of the License at
@@ -32,35 +32,16 @@ class TestPigServiceCheck(RMFTestCase):
                        hdp_stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES
     )
-    self.assertResourceCalled('HdfsResource', '/user/ambari-qa/pigsmoke.out',
-        security_enabled = False,
-        hadoop_bin_dir = '/usr/bin',
-        keytab = UnknownConfigurationMock(),
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'ambari-qa',
-        action = ['delete_on_execute'],
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        type = 'directory',
-    )
-    self.assertResourceCalled('HdfsResource', '/user/ambari-qa/passwd',
-        security_enabled = False,
-        hadoop_bin_dir = '/usr/bin',
-        keytab = UnknownConfigurationMock(),
-        kinit_path_local = '/usr/bin/kinit',
-        source = '/etc/passwd',
-        user = 'ambari-qa',
-        action = ['create_on_execute'],
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        type = 'file',
-    )
-    self.assertResourceCalled('HdfsResource', None,
-        security_enabled = False,
-        hadoop_bin_dir = '/usr/bin',
-        keytab = UnknownConfigurationMock(),
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        action = ['execute'],
-        hadoop_conf_dir = '/etc/hadoop/conf',
+    self.assertResourceCalled('ExecuteHadoop', 'dfs -rmr pigsmoke.out passwd; hadoop --config /etc/hadoop/conf dfs -put /etc/passwd passwd ',
+      try_sleep = 5,
+      tries = 3,
+      user = 'ambari-qa',
+      conf_dir = '/etc/hadoop/conf',
+      security_enabled = False,
+      principal = UnknownConfigurationMock(),
+      keytab = UnknownConfigurationMock(),
+      bin_dir = '/usr/bin',
+      kinit_path_local = '/usr/bin/kinit'
     )
        
     self.assertResourceCalled('File', '/tmp/pigSmoke.sh',
@@ -75,7 +56,7 @@ class TestPigServiceCheck(RMFTestCase):
       try_sleep = 5,
     )
        
-    self.assertResourceCalled('ExecuteHadoop', 'fs -test -e /user/ambari-qa/pigsmoke.out',
+    self.assertResourceCalled('ExecuteHadoop', 'fs -test -e pigsmoke.out',
       user = 'ambari-qa',
       bin_dir = '/usr/bin',
       conf_dir = '/etc/hadoop/conf',
@@ -91,35 +72,16 @@ class TestPigServiceCheck(RMFTestCase):
                        target = RMFTestCase.TARGET_COMMON_SERVICES
     )
     
-    self.assertResourceCalled('HdfsResource', '/user/ambari-qa/pigsmoke.out',
-        security_enabled = True,
-        hadoop_bin_dir = '/usr/bin',
-        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'ambari-qa',
-        action = ['delete_on_execute'],
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        type = 'directory',
-    )
-    self.assertResourceCalled('HdfsResource', '/user/ambari-qa/passwd',
-        security_enabled = True,
-        hadoop_bin_dir = '/usr/bin',
-        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-        kinit_path_local = '/usr/bin/kinit',
-        source = '/etc/passwd',
-        user = 'ambari-qa',
-        action = ['create_on_execute'],
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        type = 'file',
-    )
-    self.assertResourceCalled('HdfsResource', None,
-        security_enabled = True,
-        hadoop_bin_dir = '/usr/bin',
-        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        action = ['execute'],
-        hadoop_conf_dir = '/etc/hadoop/conf',
+    self.assertResourceCalled('ExecuteHadoop', 'dfs -rmr pigsmoke.out passwd; hadoop --config /etc/hadoop/conf dfs -put /etc/passwd passwd ',
+      try_sleep = 5,
+      tries = 3,
+      user = 'ambari-qa',
+      principal = 'ambari-qa@EXAMPLE.COM',
+      conf_dir = '/etc/hadoop/conf',
+      security_enabled = True, 
+      keytab = '/etc/security/keytabs/smokeuser.headless.keytab',
+      bin_dir = '/usr/bin',
+      kinit_path_local = '/usr/bin/kinit'
     )
        
     self.assertResourceCalled('File', '/tmp/pigSmoke.sh',
@@ -134,7 +96,7 @@ class TestPigServiceCheck(RMFTestCase):
       try_sleep = 5,
     )
        
-    self.assertResourceCalled('ExecuteHadoop', 'fs -test -e /user/ambari-qa/pigsmoke.out',
+    self.assertResourceCalled('ExecuteHadoop', 'fs -test -e pigsmoke.out',
       user = 'ambari-qa',
       bin_dir = '/usr/bin',
       conf_dir = '/etc/hadoop/conf',
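
Worth noting in the Pig check: the revert folds three HdfsResource declarations into one ExecuteHadoop whose command string chains a bare dfs subcommand with a full hadoop --config invocation. A sketch of the restored call, with the command and kwargs taken from the assertion above (only the import path is assumed; keytab and principal come from cluster config and are omitted here):

    from resource_management.libraries.resources.execute_hadoop import ExecuteHadoop

    # One shot: remove any previous smoke artifacts, then stage /etc/passwd
    # as the smoke input, both relative to ambari-qa's HDFS home.
    ExecuteHadoop('dfs -rmr pigsmoke.out passwd; '
                  'hadoop --config /etc/hadoop/conf dfs -put /etc/passwd passwd ',
                  user='ambari-qa',
                  conf_dir='/etc/hadoop/conf',
                  bin_dir='/usr/bin',
                  kinit_path_local='/usr/bin/kinit',
                  security_enabled=False,
                  tries=3,
                  try_sleep=5)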

http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py
index 8d6743a..efec6f7 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py
@@ -19,8 +19,6 @@ limitations under the License.
 '''
 import json
 from mock.mock import MagicMock, call, patch
-from resource_management.libraries.script.script import Script
-from resource_management.libraries.functions import version
 from stacks.utils.RMFTestCase import *
 import os
 
@@ -137,66 +135,72 @@ class TestHistoryServer(RMFTestCase):
     self.assertNoMoreResources()
 
   def assert_configure_default(self):
-
-    self.assertResourceCalled('HdfsResource', '/app-logs',
-        security_enabled = False,
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        keytab = UnknownConfigurationMock(),
-        user = 'hdfs',
-        kinit_path_local = '/usr/bin/kinit',
-        recursive_chmod = True,
-        owner = 'yarn',
-        group = 'hadoop',
-        hadoop_bin_dir = '/usr/bin',
-        type = 'directory',
-        action = ['create_on_execute'],
-        mode = 0777,
-    )
-    self.assertResourceCalled('HdfsResource', '/mapred',
-        security_enabled = False,
-        hadoop_bin_dir = '/usr/bin',
-        keytab = UnknownConfigurationMock(),
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        owner = 'mapred',
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        type = 'directory',
-        action = ['create_on_execute'],
-    )
-    self.assertResourceCalled('HdfsResource', '/mapred/system',
-        security_enabled = False,
-        hadoop_bin_dir = '/usr/bin',
-        keytab = UnknownConfigurationMock(),
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        owner = 'hdfs',
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        type = 'directory',
-        action = ['create_on_execute'],
-    )
-    self.assertResourceCalled('HdfsResource', '/mr-history/done',
-        security_enabled = False,
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        keytab = UnknownConfigurationMock(),
-        change_permissions_for_parents = True,
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        owner = 'mapred',
-        group = 'hadoop',
-        hadoop_bin_dir = '/usr/bin',
-        type = 'directory',
-        action = ['create_on_execute'],
-        mode = 0777,
-    )
-    self.assertResourceCalled('HdfsResource', None,
-        security_enabled = False,
-        hadoop_bin_dir = '/usr/bin',
-        keytab = UnknownConfigurationMock(),
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        action = ['execute'],
-        hadoop_conf_dir = '/etc/hadoop/conf',
-    )
+    self.assertResourceCalled('HdfsDirectory', '/app-logs',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              recursive_chmod = True,
+                              owner = 'yarn',
+                              group = 'hadoop',
+                              action = ['create_delayed'],
+                              mode = 0777,
+                              bin_dir = '/usr/bin'
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/mapred',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              owner = 'mapred',
+                              bin_dir = '/usr/bin',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/mapred/system',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              owner = 'hdfs',
+                              bin_dir = '/usr/bin',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/mr-history/tmp',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              mode = 0777,
+                              owner = 'mapred',
+                              group = 'hadoop',
+                              bin_dir = '/usr/bin',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/mr-history/done',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              mode = 01777,
+                              owner = 'mapred',
+                              group = 'hadoop',
+                              bin_dir = '/usr/bin',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', None,
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              bin_dir = '/usr/bin',
+                              action = ['create'],
+                              )
     self.assertResourceCalled('Directory', '/var/run/hadoop-yarn',
       owner = 'yarn',
       group = 'hadoop',
@@ -346,66 +350,72 @@ class TestHistoryServer(RMFTestCase):
                               )
 
   def assert_configure_secured(self):
-
-    self.assertResourceCalled('HdfsResource', '/app-logs',
-        security_enabled = True,
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-        user = 'hdfs',
-        kinit_path_local = '/usr/bin/kinit',
-        recursive_chmod = True,
-        owner = 'yarn',
-        group = 'hadoop',
-        hadoop_bin_dir = '/usr/bin',
-        type = 'directory',
-        action = ['create_on_execute'],
-        mode = 0777,
-    )
-    self.assertResourceCalled('HdfsResource', '/mapred',
-        security_enabled = True,
-        hadoop_bin_dir = '/usr/bin',
-        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        owner = 'mapred',
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        type = 'directory',
-        action = ['create_on_execute'],
-    )
-    self.assertResourceCalled('HdfsResource', '/mapred/system',
-        security_enabled = True,
-        hadoop_bin_dir = '/usr/bin',
-        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        owner = 'hdfs',
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        type = 'directory',
-        action = ['create_on_execute'],
-    )
-    self.assertResourceCalled('HdfsResource', '/mr-history/done',
-        security_enabled = True,
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-        change_permissions_for_parents = True,
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        owner = 'mapred',
-        group = 'hadoop',
-        hadoop_bin_dir = '/usr/bin',
-        type = 'directory',
-        action = ['create_on_execute'],
-        mode = 0777,
-    )
-    self.assertResourceCalled('HdfsResource', None,
-        security_enabled = True,
-        hadoop_bin_dir = '/usr/bin',
-        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        action = ['execute'],
-        hadoop_conf_dir = '/etc/hadoop/conf',
-    )
+    self.assertResourceCalled('HdfsDirectory', '/app-logs',
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              recursive_chmod = True,
+                              owner = 'yarn',
+                              group = 'hadoop',
+                              action = ['create_delayed'],
+                              bin_dir = '/usr/bin',
+                              mode = 0777,
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/mapred',
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              owner = 'mapred',
+                              bin_dir = '/usr/bin',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/mapred/system',
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              owner = 'hdfs',
+                              bin_dir = '/usr/bin',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/mr-history/tmp',
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0777,
+                              owner = 'mapred',
+                              group = 'hadoop',
+                              bin_dir = '/usr/bin',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/mr-history/done',
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 01777,
+                              owner = 'mapred',
+                              group = 'hadoop',
+                              bin_dir = '/usr/bin',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', None,
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              bin_dir = '/usr/bin',
+                              action = ['create'],
+                              )
     self.assertResourceCalled('Directory', '/var/run/hadoop-yarn',
       owner = 'yarn',
       group = 'hadoop',
@@ -667,8 +677,6 @@ class TestHistoryServer(RMFTestCase):
     )
     put_structured_out_mock.assert_called_with({"securityState": "UNSECURED"})
 
-  @patch.object(Script, "is_hdp_stack_greater_or_equal", new = MagicMock(return_value="2.3.0"))
-  @patch.object(version, "get_hdp_build_version", new = MagicMock(return_value="2.3.0.0-1234"))
   def test_pre_rolling_restart_23(self):
     config_file = self.get_src_folder()+"/test/python/stacks/2.0.6/configs/default.json"
     with open(config_file, "r") as f:
@@ -687,32 +695,9 @@ class TestHistoryServer(RMFTestCase):
                        mocks_dict = mocks_dict)
 
     self.assertResourceCalled('Execute', 'hdp-select set hadoop-mapreduce-historyserver %s' % version)
-    self.assertResourceCalled('HdfsResource', 'hdfs:///hdp/apps/2.3.0.0-1234/mapreduce//mapreduce.tar.gz',
-        security_enabled = False,
-        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
-        keytab = UnknownConfigurationMock(),
-        source = '/usr/hdp/current/hadoop-client/mapreduce.tar.gz',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        owner = 'hdfs',
-        group = 'hadoop',
-        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
-        type = 'file',
-        action = ['create_on_execute'],
-        mode = 0444,
-    )
-    self.assertResourceCalled('HdfsResource', None,
-        security_enabled = False,
-        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
-        keytab = UnknownConfigurationMock(),
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        action = ['execute'],
-        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
-    )
     self.assertNoMoreResources()
 
-    self.assertEquals(2, mocks_dict['call'].call_count)
+    self.assertEquals(3, mocks_dict['call'].call_count)
     self.assertEquals(
       "conf-select create-conf-dir --package hadoop --stack-version 2.3.0.0-1234 --conf-version 0",
        mocks_dict['call'].call_args_list[0][0][0])
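
The HdfsDirectory assertions restored in assert_configure_default/secured follow a two-phase protocol: each create_delayed call only queues a directory, and the final call with a None name and action=['create'] flushes the whole queue, so the agent can realize the directories in a small number of hadoop fs calls rather than one per path. The 01777 mode on /mr-history/done is a Python 2 octal literal with the sticky bit set. A condensed sketch under those assumptions (the shared connection kwargs such as conf_dir, hdfs_user, keytab, kinit_path_local, and bin_dir are omitted for brevity):

    from resource_management.libraries.resources.hdfs_directory import HdfsDirectory

    # Phase 1: queue directories; nothing touches HDFS yet.
    HdfsDirectory('/app-logs', owner='yarn', group='hadoop',
                  mode=0777, recursive_chmod=True, action=['create_delayed'])
    HdfsDirectory('/mr-history/done', owner='mapred', group='hadoop',
                  mode=01777,  # sticky bit: users may delete only their own files
                  action=['create_delayed'])

    # Phase 2: flush everything queued above in one batch.
    HdfsDirectory(None, action=['create'])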

http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/test/python/stacks/2.0.6/YARN/test_mapreduce2_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_mapreduce2_client.py b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_mapreduce2_client.py
index 2634879..26fab63 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_mapreduce2_client.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_mapreduce2_client.py
@@ -19,7 +19,6 @@ limitations under the License.
 '''
 import json
 from mock.mock import MagicMock, call, patch
-from resource_management.libraries.functions import version
 from stacks.utils.RMFTestCase import *
 import os
 
@@ -28,7 +27,6 @@ origin_exists = os.path.exists
 @patch.object(os.path, "exists", new=MagicMock(
   side_effect=lambda *args: origin_exists(args[0])
   if args[0][-2:] == "j2" else True))
-@patch.object(version, "get_hdp_build_version", new = MagicMock(return_value="2.2.0.0-1234"))
 class TestMapReduce2Client(RMFTestCase):
   COMMON_SERVICES_PACKAGE_DIR = "YARN/2.1.0.2.0/package"
   STACK_VERSION = "2.0.6"
@@ -356,7 +354,6 @@ class TestMapReduce2Client(RMFTestCase):
                               )
     self.assertNoMoreResources()
 
-  @patch.object(version, "get_hdp_build_version", new=MagicMock(return_value="2.2.0.0-2041"))
   def test_upgrade(self):
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/mapreduce2_client.py",
                    classname = "MapReduce2Client",
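
The decorators deleted from this client test pinned a library function for the duration of a test method; with HdfsResource reverted, the client tests no longer consult the build version, so the stub goes away. For reference, the shape of the removed patching, mirroring the deleted lines (the pinned version string is the one that was removed):

    from mock.mock import MagicMock, patch
    from resource_management.libraries.functions import version
    from stacks.utils.RMFTestCase import *

    class TestMapReduce2Client(RMFTestCase):
      # Replace version.get_hdp_build_version with a stub for this test only;
      # the stub ignores its arguments and always reports the pinned build.
      @patch.object(version, "get_hdp_build_version",
                    new=MagicMock(return_value="2.2.0.0-2041"))
      def test_upgrade(self):
        ...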

http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/test/python/stacks/2.0.6/YARN/test_mapreduce2_service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_mapreduce2_service_check.py b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_mapreduce2_service_check.py
index bc91487..01073df 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_mapreduce2_service_check.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_mapreduce2_service_check.py
@@ -39,25 +39,19 @@ class TestServiceCheck(RMFTestCase):
                       hdp_stack_version = self.STACK_VERSION,
                       target = RMFTestCase.TARGET_COMMON_SERVICES
     )
-    self.assertResourceCalled('HdfsResource', '/user/ambari-qa/mapredsmokeinput',
-        security_enabled = False,
-        hadoop_bin_dir = '/usr/bin',
-        keytab = UnknownConfigurationMock(),
-        kinit_path_local = '/usr/bin/kinit',
-        source = '/etc/passwd',
-        user = 'hdfs',
-        action = ['create_on_execute'],
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        type = 'file',
+    self.assertResourceCalled('ExecuteHadoop', 'fs -rm -r -f /user/ambari-qa/mapredsmokeoutput /user/ambari-qa/mapredsmokeinput',
+                      try_sleep = 5,
+                      tries = 1,
+                      user = 'ambari-qa',
+                      bin_dir = "/bin:/usr/bin:/usr/lib/hadoop-yarn/bin",
+                      conf_dir = '/etc/hadoop/conf',
     )
-    self.assertResourceCalled('HdfsResource', None,
-        security_enabled = False,
-        hadoop_bin_dir = '/usr/bin',
-        keytab = UnknownConfigurationMock(),
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        action = ['execute'],
-        hadoop_conf_dir = '/etc/hadoop/conf',
+    self.assertResourceCalled('ExecuteHadoop', 'fs -put /etc/passwd /user/ambari-qa/mapredsmokeinput',
+                      try_sleep = 5,
+                      tries = 1,
+                      bin_dir = "/bin:/usr/bin:/usr/lib/hadoop-yarn/bin",
+                      user = 'ambari-qa',
+                      conf_dir = '/etc/hadoop/conf',
     )
     self.assertResourceCalled('ExecuteHadoop', 'jar /usr/lib/hadoop-mapreduce/hadoop-mapreduce-examples-2.*.jar wordcount /user/ambari-qa/mapredsmokeinput /user/ambari-qa/mapredsmokeoutput',
                       logoutput = True,
@@ -83,28 +77,22 @@ class TestServiceCheck(RMFTestCase):
                       hdp_stack_version = self.STACK_VERSION,
                       target = RMFTestCase.TARGET_COMMON_SERVICES
     )
-    self.assertResourceCalled('HdfsResource', '/user/ambari-qa/mapredsmokeinput',
-        security_enabled = True,
-        hadoop_bin_dir = '/usr/bin',
-        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-        kinit_path_local = '/usr/bin/kinit',
-        source = '/etc/passwd',
-        user = 'hdfs',
-        action = ['create_on_execute'],
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        type = 'file',
+    self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/smokeuser.headless.keytab ambari-qa@EXAMPLE.COM;',
+                      user = 'ambari-qa',
     )
-    self.assertResourceCalled('HdfsResource', None,
-        security_enabled = True,
-        hadoop_bin_dir = '/usr/bin',
-        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        action = ['execute'],
-        hadoop_conf_dir = '/etc/hadoop/conf',
+    self.assertResourceCalled('ExecuteHadoop', 'fs -rm -r -f /user/ambari-qa/mapredsmokeoutput /user/ambari-qa/mapredsmokeinput',
+                      try_sleep = 5,
+                      tries = 1,
+                      user = 'ambari-qa',
+                      bin_dir = "/bin:/usr/bin:/usr/lib/hadoop-yarn/bin",
+                      conf_dir = '/etc/hadoop/conf',
     )
-    self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/smokeuser.headless.keytab ambari-qa@EXAMPLE.COM;',
-        user = 'ambari-qa',
+    self.assertResourceCalled('ExecuteHadoop', 'fs -put /etc/passwd /user/ambari-qa/mapredsmokeinput',
+                      try_sleep = 5,
+                      tries = 1,
+                      bin_dir = "/bin:/usr/bin:/usr/lib/hadoop-yarn/bin",
+                      user = 'ambari-qa',
+                      conf_dir = '/etc/hadoop/conf',
     )
     self.assertResourceCalled('ExecuteHadoop', 'jar /usr/lib/hadoop-mapreduce/hadoop-mapreduce-examples-2.*.jar wordcount /user/ambari-qa/mapredsmokeinput /user/ambari-qa/mapredsmokeoutput',
                       logoutput = True,
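
In the secured variant the revert moves authentication up front: the smoke user kinits once, then plain ExecuteHadoop calls run under that ticket, instead of each HdfsResource carrying keytab/principal kwargs. A sketch with the keytab, principal, and fs commands copied from the assertions above (imports are assumptions about the library layout):

    from resource_management.core.resources.system import Execute
    from resource_management.libraries.resources.execute_hadoop import ExecuteHadoop

    # Obtain a Kerberos ticket for the smoke user before touching HDFS.
    Execute('/usr/bin/kinit -kt /etc/security/keytabs/smokeuser.headless.keytab '
            'ambari-qa@EXAMPLE.COM;',
            user='ambari-qa')

    # Subsequent fs operations run under that ticket.
    ExecuteHadoop('fs -rm -r -f /user/ambari-qa/mapredsmokeoutput '
                  '/user/ambari-qa/mapredsmokeinput',
                  user='ambari-qa',
                  conf_dir='/etc/hadoop/conf',
                  bin_dir='/bin:/usr/bin:/usr/lib/hadoop-yarn/bin',
                  tries=1,
                  try_sleep=5)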

http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/test/python/stacks/2.0.6/YARN/test_nodemanager.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_nodemanager.py b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_nodemanager.py
index f579e9b..46e7cbc 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_nodemanager.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_nodemanager.py
@@ -134,6 +134,72 @@ class TestNodeManager(RMFTestCase):
     self.assertNoMoreResources()
 
   def assert_configure_default(self):
+    self.assertResourceCalled('HdfsDirectory', '/app-logs',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              recursive_chmod = True,
+                              owner = 'yarn',
+                              group = 'hadoop',
+                              action = ['create_delayed'],
+                              bin_dir = '/usr/bin',
+                              mode = 0777,
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/mapred',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              owner = 'mapred',
+                              bin_dir = '/usr/bin',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/mapred/system',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              owner = 'hdfs',
+                              bin_dir = '/usr/bin',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/mr-history/tmp',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              mode = 0777,
+                              owner = 'mapred',
+                              group = 'hadoop',
+                              bin_dir = '/usr/bin',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/mr-history/done',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              mode = 01777,
+                              owner = 'mapred',
+                              group = 'hadoop',
+                              bin_dir = '/usr/bin',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', None,
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              bin_dir = '/usr/bin',
+                              action = ['create'],
+                              )
     self.assertResourceCalled('Directory', '/hadoop/yarn/local',
                               owner = 'yarn',
                               group = 'hadoop',
@@ -320,6 +386,72 @@ class TestNodeManager(RMFTestCase):
                               )
 
   def assert_configure_secured(self):
+    self.assertResourceCalled('HdfsDirectory', '/app-logs',
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              recursive_chmod = True,
+                              owner = 'yarn',
+                              group = 'hadoop',
+                              action = ['create_delayed'],
+                              bin_dir = '/usr/bin',
+                              mode = 0777,
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/mapred',
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              owner = 'mapred',
+                              bin_dir = '/usr/bin',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/mapred/system',
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              owner = 'hdfs',
+                              bin_dir = '/usr/bin',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/mr-history/tmp',
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0777,
+                              bin_dir = '/usr/bin',
+                              owner = 'mapred',
+                              group = 'hadoop',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/mr-history/done',
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 01777,
+                              bin_dir = '/usr/bin',
+                              owner = 'mapred',
+                              group = 'hadoop',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', None,
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              bin_dir = '/usr/bin',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              action = ['create'],
+                              )
     self.assertResourceCalled('Directory', '/hadoop/yarn/local',
                               owner = 'yarn',
                               group = 'hadoop',
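
Every HdfsDirectory assertion in these NodeManager (and HistoryServer) blocks repeats the same connection kwargs: conf_dir, hdfs_user, keytab, kinit_path_local, bin_dir, security_enabled. In the service scripts that repetition is typically factored out once in params.py via partial application; this is a pattern assumed here rather than shown in this diff:

    import functools
    from resource_management.libraries.resources.hdfs_directory import HdfsDirectory

    # Bind the cluster-wide kwargs once ...
    HdfsDirectory = functools.partial(
        HdfsDirectory,
        conf_dir='/etc/hadoop/conf',
        hdfs_user='hdfs',
        keytab='/etc/security/keytabs/hdfs.headless.keytab',
        kinit_path_local='/usr/bin/kinit',
        bin_dir='/usr/bin',
        security_enabled=True)

    # ... so call sites only state what differs per directory.
    HdfsDirectory('/mapred', owner='mapred', action=['create_delayed'])
    HdfsDirectory(None, action=['create'])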

http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/test/python/stacks/2.0.6/YARN/test_resourcemanager.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_resourcemanager.py b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_resourcemanager.py
index 94f35be..f40beac 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_resourcemanager.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_resourcemanager.py
@@ -21,17 +21,12 @@ import json
 import os
 from mock.mock import MagicMock, call, patch
 from stacks.utils.RMFTestCase import *
-from resource_management.libraries.functions import version
-from resource_management.libraries.script.script import Script
 
 origin_exists = os.path.exists
 @patch("platform.linux_distribution", new = MagicMock(return_value="Linux"))
 @patch.object(os.path, "exists", new=MagicMock(
   side_effect=lambda *args: origin_exists(args[0])
   if args[0][-2:] == "j2" else True))
-
-@patch.object(Script, "is_hdp_stack_greater_or_equal", new = MagicMock(return_value=False))
-@patch.object(version, "get_hdp_build_version", new = MagicMock(return_value="2.0.0.0-1234"))
 class TestResourceManager(RMFTestCase):
   COMMON_SERVICES_PACKAGE_DIR = "YARN/2.1.0.2.0/package"
   STACK_VERSION = "2.0.6"
@@ -58,39 +53,6 @@ class TestResourceManager(RMFTestCase):
 
     self.assert_configure_default()
 
-    self.assertResourceCalled('HdfsResource', '/apps/tez/',
-        security_enabled = False,
-        hadoop_bin_dir = '/usr/bin',
-        keytab = UnknownConfigurationMock(),
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        owner = 'tez',
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        type = 'directory',
-        action = ['create_on_execute'],
-        mode = 0755,
-    )
-    self.assertResourceCalled('HdfsResource', '/apps/tez/lib/',
-        security_enabled = False,
-        hadoop_bin_dir = '/usr/bin',
-        keytab = UnknownConfigurationMock(),
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        owner = 'tez',
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        type = 'directory',
-        action = ['create_on_execute'],
-        mode = 0755,
-    )
-    self.assertResourceCalled('HdfsResource', None,
-        security_enabled = False,
-        hadoop_bin_dir = '/usr/bin',
-        keytab = UnknownConfigurationMock(),
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        action = ['execute'],
-        hadoop_conf_dir = '/etc/hadoop/conf',
-    )
     self.assertResourceCalled('File', '/var/run/hadoop-yarn/yarn/yarn-yarn-resourcemanager.pid',
                               action = ['delete'],
                               not_if = 'ls /var/run/hadoop-yarn/yarn/yarn-yarn-resourcemanager.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop-yarn/yarn/yarn-yarn-resourcemanager.pid` >/dev/null 2>&1',
@@ -144,6 +106,7 @@ class TestResourceManager(RMFTestCase):
     self.assert_configure_secured()
 
     pid_check_cmd = 'ls /var/run/hadoop-yarn/yarn/yarn-yarn-resourcemanager.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop-yarn/yarn/yarn-yarn-resourcemanager.pid` >/dev/null 2>&1'
+
     self.assertResourceCalled('File', '/var/run/hadoop-yarn/yarn/yarn-yarn-resourcemanager.pid',
                               not_if = pid_check_cmd,
                               action=['delete'])
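
The File assertion restored here is a guarded cleanup: not_if skips the action when its shell command exits 0, i.e. when the pid file exists and the recorded pid still belongs to a live process. A self-contained sketch of the same guard:

    from resource_management.core.resources.system import File

    pid_file = '/var/run/hadoop-yarn/yarn/yarn-yarn-resourcemanager.pid'
    # Exits 0 only if the pid file exists AND the recorded pid is alive.
    pid_check_cmd = ('ls %s >/dev/null 2>&1 && '
                     'ps -p `cat %s` >/dev/null 2>&1') % (pid_file, pid_file)

    # Remove a stale pid file, but leave it alone if a live
    # ResourceManager still owns it.
    File(pid_file,
         action=['delete'],
         not_if=pid_check_cmd)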

http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/test/python/stacks/2.0.6/YARN/test_yarn_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_yarn_client.py b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_yarn_client.py
index dc248d6..02b020b 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_yarn_client.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_yarn_client.py
@@ -21,7 +21,6 @@ import json
 import os
 from mock.mock import MagicMock, call, patch
 from stacks.utils.RMFTestCase import *
-from resource_management.libraries.functions import version
 from resource_management.libraries.script.script import Script
 
 origin_exists = os.path.exists
@@ -516,7 +515,6 @@ class TestYarnClient(RMFTestCase):
     self.assertNoMoreResources()
 
 
-  @patch.object(version, "get_hdp_build_version", new=MagicMock(return_value="2.2.0.0-2041"))
   def test_upgrade(self):
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/yarn_client.py",
                    classname = "YarnClient",

http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/test/python/stacks/2.0.6/configs/default.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/configs/default.json b/ambari-server/src/test/python/stacks/2.0.6/configs/default.json
index 485e882..c90d085 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/configs/default.json
+++ b/ambari-server/src/test/python/stacks/2.0.6/configs/default.json
@@ -532,17 +532,8 @@
         "smokeuser": "ambari-qa",
         "kerberos_domain": "EXAMPLE.COM",
         "user_group": "hadoop",
-        "hive_tar_source": "/usr/hdp/current/hive-client/hive.tar.gz", 
-        "hadoop-streaming_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/", 
-        "pig_tar_source": "/usr/hdp/current/pig-client/pig.tar.gz", 
-        "hive_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/hive/", 
-        "tez_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/tez/", 
-        "mapreduce_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/", 
-        "tez_tar_source": "/usr/hdp/current/tez-client/lib/tez.tar.gz", 
-        "pig_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/pig/", 
-        "mapreduce_tar_source": "/usr/hdp/current/hadoop-client/mapreduce.tar.gz", 
-        "sqoop_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/sqoop/", 
-        "sqoop_tar_source": "/usr/hdp/current/sqoop-client/sqoop.tar.gz"
+        "mapreduce_tar_destination_folder" : "hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/",
+        "mapreduce_tar_source" : "/usr/hdp/current/hadoop-client/mapreduce.tar.gz"
       },
 
       "hbase-env": {

http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/test/python/stacks/2.0.6/configs/secured.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/configs/secured.json b/ambari-server/src/test/python/stacks/2.0.6/configs/secured.json
index 0b54d8c..f000ef3 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/configs/secured.json
+++ b/ambari-server/src/test/python/stacks/2.0.6/configs/secured.json
@@ -547,18 +547,7 @@
             "kerberos_domain": "EXAMPLE.COM",
             "user_group": "hadoop",
             "smokeuser_keytab": "/etc/security/keytabs/smokeuser.headless.keytab",
-            "kinit_path_local": "/usr/bin",
-	        "hive_tar_source": "/usr/hdp/current/hive-client/hive.tar.gz", 
-	        "hadoop-streaming_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/", 
-	        "pig_tar_source": "/usr/hdp/current/pig-client/pig.tar.gz", 
-	        "hive_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/hive/", 
-	        "tez_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/tez/", 
-	        "mapreduce_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/", 
-	        "tez_tar_source": "/usr/hdp/current/tez-client/lib/tez.tar.gz", 
-	        "pig_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/pig/", 
-	        "mapreduce_tar_source": "/usr/hdp/current/hadoop-client/mapreduce.tar.gz", 
-	        "sqoop_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/sqoop/", 
-	        "sqoop_tar_source": "/usr/hdp/current/sqoop-client/sqoop.tar.gz"
+            "kinit_path_local": "/usr/bin"
         },
         "hadoop-env": {
             "namenode_opt_maxnewsize": "200m",

http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/test/python/stacks/2.1/FALCON/test_falcon_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.1/FALCON/test_falcon_server.py b/ambari-server/src/test/python/stacks/2.1/FALCON/test_falcon_server.py
index 8fa7cd2..d4017f7 100644
--- a/ambari-server/src/test/python/stacks/2.1/FALCON/test_falcon_server.py
+++ b/ambari-server/src/test/python/stacks/2.1/FALCON/test_falcon_server.py
@@ -122,31 +122,30 @@ class TestFalconServer(RMFTestCase):
                               owner = 'falcon',
                               recursive = True
                               )
-    self.assertResourceCalled('HdfsResource', '/apps/falcon',
-        security_enabled = False,
-        hadoop_bin_dir = '/usr/bin',
-        keytab = UnknownConfigurationMock(),
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        owner = 'falcon',
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        type = 'directory',
-        action = ['create_on_execute'],
-        mode = 0777,
-    )
+    self.assertResourceCalled('HdfsDirectory', '/apps/falcon',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0777,
+                              owner = 'falcon',
+                              bin_dir = '/usr/bin',
+                              action = ['create_delayed']
+                              )
     self.assertResourceCalled('Directory', '/hadoop/falcon/store',
-        owner = 'falcon',
-        recursive = True,
-    )
-    self.assertResourceCalled('HdfsResource', None,
-        security_enabled = False,
-        hadoop_bin_dir = '/usr/bin',
-        keytab = UnknownConfigurationMock(),
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        action = ['execute'],
-        hadoop_conf_dir = '/etc/hadoop/conf',
-    )
+                              owner = 'falcon',
+                              recursive = True
+                              )
+    self.assertResourceCalled('HdfsDirectory', None,
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              bin_dir = '/usr/bin',
+                              action = ['create'],
+                              )
     self.assertResourceCalled('Directory', '/hadoop/falcon',
                               owner = 'falcon',
                               recursive = True,
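
The Falcon block interleaves two resource types that are easy to conflate: Directory manages a path on the agent's local filesystem (recursive=True behaves like mkdir -p), while HdfsDirectory manages a path inside HDFS via the hadoop client. A trimmed sketch of the pairing, with the shared HDFS connection kwargs omitted:

    from resource_management.core.resources.system import Directory
    from resource_management.libraries.resources.hdfs_directory import HdfsDirectory

    # Local directory on the host running the Falcon server.
    Directory('/hadoop/falcon/store', owner='falcon', recursive=True)

    # Directory inside HDFS, queued and then flushed in one batch.
    HdfsDirectory('/apps/falcon', owner='falcon', mode=0777,
                  action=['create_delayed'])
    HdfsDirectory(None, action=['create'])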

http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/test/python/stacks/2.1/FALCON/test_service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.1/FALCON/test_service_check.py b/ambari-server/src/test/python/stacks/2.1/FALCON/test_service_check.py
index bb0ce90..cd6f693 100644
--- a/ambari-server/src/test/python/stacks/2.1/FALCON/test_service_check.py
+++ b/ambari-server/src/test/python/stacks/2.1/FALCON/test_service_check.py
@@ -21,7 +21,7 @@ limitations under the License.
 from stacks.utils.RMFTestCase import *
 
 
-class TestFalconServiceCheck(RMFTestCase):
+class TestFalconServer(RMFTestCase):
   COMMON_SERVICES_PACKAGE_DIR = "FALCON/0.5.0.2.1/package"
   STACK_VERSION = "2.1"
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/test/python/stacks/2.1/TEZ/test_service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.1/TEZ/test_service_check.py b/ambari-server/src/test/python/stacks/2.1/TEZ/test_service_check.py
index 1eeba7e..50d99dd 100644
--- a/ambari-server/src/test/python/stacks/2.1/TEZ/test_service_check.py
+++ b/ambari-server/src/test/python/stacks/2.1/TEZ/test_service_check.py
@@ -21,7 +21,7 @@ limitations under the License.
 from stacks.utils.RMFTestCase import *
 
 
-class TestTezServiceCheck(RMFTestCase):
+class TestFalconServer(RMFTestCase):
   COMMON_SERVICES_PACKAGE_DIR = "TEZ/0.4.0.2.1/package"
   STACK_VERSION = "2.1"
 
@@ -33,56 +33,60 @@ class TestTezServiceCheck(RMFTestCase):
                        hdp_stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES
     )
+    self.assertResourceCalled('ExecuteHadoop', 'fs -rm -r -f /tmp/tezsmokeinput /tmp/tezsmokeoutput',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              try_sleep = 5,
+                              kinit_path_local = '/usr/bin/kinit',
+                              tries = 3,
+                              user = 'ambari-qa',
+                              bin_dir = '/usr/bin',
+                              principal = UnknownConfigurationMock(),
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/tmp',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0777,
+                              owner = 'hdfs',
+                              bin_dir = '/usr/bin',
+                              action = ['create'],
+                              )
+    self.assertResourceCalled('ExecuteHadoop', 'fs -mkdir /tmp/tezsmokeinput',
+                              try_sleep = 5,
+                              tries = 3,
+                              bin_dir = '/usr/bin',
+                              user = 'ambari-qa',
+                              conf_dir = '/etc/hadoop/conf',
+                              )
     self.assertResourceCalled('File', '/tmp/sample-tez-test',
-        content = 'foo\nbar\nfoo\nbar\nfoo',
-        mode = 0755,
-    )
-    self.assertResourceCalled('HdfsResource', '/tmp/tezsmokeinput',
-        security_enabled = False,
-        hadoop_bin_dir = '/usr/bin',
-        keytab = UnknownConfigurationMock(),
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        owner = 'ambari-qa',
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        type = 'directory',
-        action = ['create_on_execute'],
-    )
-    self.assertResourceCalled('HdfsResource', '/tmp/tezsmokeinput/sample-tez-test',
-        security_enabled = False,
-        hadoop_bin_dir = '/usr/bin',
-        keytab = UnknownConfigurationMock(),
-        kinit_path_local = '/usr/bin/kinit',
-        source = '/tmp/sample-tez-test',
-        user = 'hdfs',
-        owner = 'ambari-qa',
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        type = 'file',
-        action = ['create_on_execute'],
-    )
-    self.assertResourceCalled('HdfsResource', None,
-        security_enabled = False,
-        hadoop_bin_dir = '/usr/bin',
-        keytab = UnknownConfigurationMock(),
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        action = ['execute'],
-        hadoop_conf_dir = '/etc/hadoop/conf',
-    )
+                              content = 'foo\nbar\nfoo\nbar\nfoo',
+                              mode = 0755,
+                              )
+    self.assertResourceCalled('ExecuteHadoop', 'fs -put /tmp/sample-tez-test /tmp/tezsmokeinput/',
+                              try_sleep = 5,
+                              tries = 3,
+                              bin_dir = '/usr/bin',
+                              user = 'ambari-qa',
+                              conf_dir = '/etc/hadoop/conf',
+                              )
     self.assertResourceCalled('ExecuteHadoop', 'jar /usr/lib/tez/tez-mapreduce-examples*.jar orderedwordcount /tmp/tezsmokeinput/sample-tez-test /tmp/tezsmokeoutput/',
-        try_sleep = 5,
-        tries = 3,
-        bin_dir = '/usr/bin',
-        user = 'ambari-qa',
-        conf_dir = '/etc/hadoop/conf',
-    )
+                              try_sleep = 5,
+                              tries = 3,
+                              bin_dir = '/usr/bin',
+                              user = 'ambari-qa',
+                              conf_dir = '/etc/hadoop/conf',
+                              )
     self.assertResourceCalled('ExecuteHadoop', 'fs -test -e /tmp/tezsmokeoutput/_SUCCESS',
-        try_sleep = 6,
-        tries = 10,
-        bin_dir = '/usr/bin',
-        user = 'ambari-qa',
-        conf_dir = '/etc/hadoop/conf',
-    )
+                              try_sleep = 6,
+                              tries = 10,
+                              bin_dir = '/usr/bin',
+                              user = 'ambari-qa',
+                              conf_dir = '/etc/hadoop/conf',
+                              )
     self.assertNoMoreResources()
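
For context, the assertions above replay the pre-HdfsResource TEZ smoke test step by step: clean any previous smoke data, ensure /tmp exists, stage the sample input, run the orderedwordcount example, and poll for _SUCCESS. A condensed sketch of that reverted flow, with values inlined from the assertions (the intermediate fs -mkdir step is elided, kerberos arguments are omitted, and it assumes the script runs inside the agent's Environment as service scripts do):

  from resource_management.libraries.resources.execute_hadoop import ExecuteHadoop
  from resource_management.libraries.resources.hdfs_directory import HdfsDirectory

  # Remove leftovers from earlier runs, then make /tmp world-writable.
  ExecuteHadoop('fs -rm -r -f /tmp/tezsmokeinput /tmp/tezsmokeoutput',
                user='ambari-qa', conf_dir='/etc/hadoop/conf',
                bin_dir='/usr/bin', tries=3, try_sleep=5)
  HdfsDirectory('/tmp', action=['create'], owner='hdfs', mode=0777,
                hdfs_user='hdfs', conf_dir='/etc/hadoop/conf', bin_dir='/usr/bin')
  # Stage the input file, then wait for the job's success marker.
  ExecuteHadoop('fs -put /tmp/sample-tez-test /tmp/tezsmokeinput/',
                user='ambari-qa', conf_dir='/etc/hadoop/conf',
                bin_dir='/usr/bin', tries=3, try_sleep=5)
  ExecuteHadoop('fs -test -e /tmp/tezsmokeoutput/_SUCCESS',
                user='ambari-qa', conf_dir='/etc/hadoop/conf',
                bin_dir='/usr/bin', tries=10, try_sleep=6)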
 
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/test/python/stacks/2.2/PIG/test_pig_service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.2/PIG/test_pig_service_check.py b/ambari-server/src/test/python/stacks/2.2/PIG/test_pig_service_check.py
index b7968d2..dd3f4f2 100644
--- a/ambari-server/src/test/python/stacks/2.2/PIG/test_pig_service_check.py
+++ b/ambari-server/src/test/python/stacks/2.2/PIG/test_pig_service_check.py
@@ -20,13 +20,17 @@ limitations under the License.
 from mock.mock import patch, MagicMock
 
 from stacks.utils.RMFTestCase import *
+from resource_management.libraries.functions import dynamic_variable_interpretation
 
 
 class TestPigServiceCheck(RMFTestCase):
   COMMON_SERVICES_PACKAGE_DIR = "PIG/0.12.0.2.0/package"
   STACK_VERSION = "2.2"
 
-  def test_service_check_secure(self):
+  @patch.object(dynamic_variable_interpretation, "copy_tarballs_to_hdfs")
+  def test_service_check_secure(self, copy_tarball_mock):
+    copy_tarball_mock.return_value = 0
+
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/service_check.py",
                        classname="PigServiceCheck",
                        command="service_check",
@@ -34,43 +38,22 @@ class TestPigServiceCheck(RMFTestCase):
                        hdp_stack_version=self.STACK_VERSION,
                        target=RMFTestCase.TARGET_COMMON_SERVICES
     )
-    
-    self.assertResourceCalled('HdfsResource', '/user/ambari-qa/pigsmoke.out',
-        security_enabled = True,
-        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
-        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'ambari-qa',
-        action = ['delete_on_execute'],
-        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
-        type = 'directory',
-    )
-    self.assertResourceCalled('HdfsResource', '/user/ambari-qa/passwd',
-        security_enabled = True,
-        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
-        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-        kinit_path_local = '/usr/bin/kinit',
-        source = '/etc/passwd',
-        user = 'ambari-qa',
-        action = ['create_on_execute'],
-        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
-        type = 'file',
-    )
-    self.assertResourceCalled('HdfsResource', None,
-        security_enabled = True,
-        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
-        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs@EXAMPLE.COM',
-        action = ['execute'],
-        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+    self.assertResourceCalled("ExecuteHadoop", "dfs -rmr pigsmoke.out passwd; hadoop --config /usr/hdp/current/hadoop-client/conf dfs -put /etc/passwd passwd ",
+      try_sleep=5,
+      tries=3,
+      user="ambari-qa",
+      conf_dir="/usr/hdp/current/hadoop-client/conf",
+      security_enabled=True,
+      principal="ambari-qa@EXAMPLE.COM",
+      keytab="/etc/security/keytabs/smokeuser.headless.keytab",
+      bin_dir="/usr/hdp/current/hadoop-client/bin",
+      kinit_path_local="/usr/bin/kinit"
     )
+
     self.assertResourceCalled("File", "/tmp/pigSmoke.sh",
       content=StaticFile("pigSmoke.sh"),
       mode=0755
     )
-    
-    
 
     self.assertResourceCalled("Execute", "pig /tmp/pigSmoke.sh",
       path=["/usr/hdp/current/pig-client/bin:/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin"],
@@ -79,55 +62,27 @@ class TestPigServiceCheck(RMFTestCase):
       try_sleep=5
     )
 
-    self.assertResourceCalled('ExecuteHadoop', 'fs -test -e /user/ambari-qa/pigsmoke.out',
-        bin_dir = '/usr/hdp/current/hadoop-client/bin',
-        user = 'ambari-qa',
-        conf_dir = '/usr/hdp/current/hadoop-client/conf',
-    )
-    self.assertResourceCalled('HdfsResource', '/user/ambari-qa/pigsmoke.out',
-        security_enabled = True,
-        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
-        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'ambari-qa',
-        action = ['delete_on_execute'],
-        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
-        type = 'directory',
-    )
-    self.assertResourceCalled('HdfsResource', '/user/ambari-qa/passwd',
-        security_enabled = True,
-        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
-        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-        kinit_path_local = '/usr/bin/kinit',
-        source = '/etc/passwd',
-        user = 'ambari-qa',
-        action = ['create_on_execute'],
-        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
-        type = 'file',
-    )
-    self.assertResourceCalled('HdfsResource', 'hdfs:///hdp/apps/2.2.0.0/tez//tez.tar.gz',
-        security_enabled = True,
-        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
-        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-        source = '/usr/hdp/current/tez-client/lib/tez.tar.gz',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs@EXAMPLE.COM',
-        owner = 'hdfs',
-        group = 'hadoop',
-        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
-        type = 'file',
-        action = ['create_on_execute'],
+    self.assertResourceCalled("ExecuteHadoop", "fs -test -e pigsmoke.out",
+      user="ambari-qa",
+      bin_dir="/usr/hdp/current/hadoop-client/bin",
+      conf_dir="/usr/hdp/current/hadoop-client/conf"
     )
-    self.assertResourceCalled('HdfsResource', None,
-        security_enabled = True,
-        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
-        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs@EXAMPLE.COM',
-        action = ['execute'],
-        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+
+    # Specific to HDP 2.2 and kerberized cluster
+    self.assertResourceCalled("ExecuteHadoop", "dfs -rmr pigsmoke.out passwd; hadoop --config /usr/hdp/current/hadoop-client/conf dfs -put /etc/passwd passwd ",
+      tries=3,
+      try_sleep=5,
+      user="ambari-qa",
+      conf_dir="/usr/hdp/current/hadoop-client/conf",
+      keytab="/etc/security/keytabs/smokeuser.headless.keytab",
+      principal="ambari-qa@EXAMPLE.COM",
+      security_enabled=True,
+      kinit_path_local="/usr/bin/kinit",
+      bin_dir="/usr/hdp/current/hadoop-client/bin"
     )
 
+    copy_tarball_mock.assert_called_once_with("tez", "hadoop-client", "ambari-qa", "hdfs", "hadoop")
+
     self.assertResourceCalled("Execute", "/usr/bin/kinit -kt /etc/security/keytabs/smokeuser.headless.keytab ambari-qa@EXAMPLE.COM;",
       user="ambari-qa")
 
@@ -138,10 +93,10 @@ class TestPigServiceCheck(RMFTestCase):
       user="ambari-qa"
     )
 
-    self.assertResourceCalled('ExecuteHadoop', 'fs -test -e /user/ambari-qa/pigsmoke.out',
-        bin_dir = '/usr/hdp/current/hadoop-client/bin',
-        user = 'ambari-qa',
-        conf_dir = '/usr/hdp/current/hadoop-client/conf',
+    self.assertResourceCalled("ExecuteHadoop", "fs -test -e pigsmoke.out",
+      user="ambari-qa",
+      bin_dir="/usr/hdp/current/hadoop-client/bin",
+      conf_dir="/usr/hdp/current/hadoop-client/conf"
     )
     self.assertNoMoreResources()
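
The mocked helper above is the HDP 2.2-specific piece of the reverted flow: instead of declaring an HdfsResource for tez.tar.gz, the service check delegates the upload to dynamic_variable_interpretation. A sketch of the call, with the positional arguments read off the assertion (the inline labels are my reading of them, not the function's documentation):

  from resource_management.libraries.functions import dynamic_variable_interpretation

  # Copies /usr/hdp/current/tez-client/lib/tez.tar.gz into HDFS under
  # /hdp/apps/<version>/tez/ so Pig-on-Tez jobs can localize it.
  dynamic_variable_interpretation.copy_tarballs_to_hdfs(
      "tez",            # tarball prefix to copy
      "hadoop-client",  # hdp-select component that resolves the stack version
      "ambari-qa",      # user performing the copy
      "hdfs",           # HDFS owner of the uploaded file
      "hadoop")         # HDFS group of the uploaded file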
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/test/python/stacks/2.2/SPARK/test_job_history_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.2/SPARK/test_job_history_server.py b/ambari-server/src/test/python/stacks/2.2/SPARK/test_job_history_server.py
index 4e8ec42..60d7924 100644
--- a/ambari-server/src/test/python/stacks/2.2/SPARK/test_job_history_server.py
+++ b/ambari-server/src/test/python/stacks/2.2/SPARK/test_job_history_server.py
@@ -93,28 +93,6 @@ class TestJobHistoryServer(RMFTestCase):
     self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/spark.service.keytab spark/localhost@EXAMPLE.COM; ',
         user = 'spark',
     )
-    self.assertResourceCalled('HdfsResource', 'hdfs:///hdp/apps/2.2.0.0/tez//tez.tar.gz',
-        security_enabled = True,
-        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
-        keytab = UnknownConfigurationMock(),
-        source = '/usr/hdp/current/tez-client/lib/tez.tar.gz',
-        kinit_path_local = '/usr/bin/kinit',
-        user = UnknownConfigurationMock(),
-        owner = UnknownConfigurationMock(),
-        group = 'hadoop',
-        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
-        type = 'file',
-        action = ['create_on_execute'],
-    )
-    self.assertResourceCalled('HdfsResource', None,
-        security_enabled = True,
-        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
-        keytab = UnknownConfigurationMock(),
-        kinit_path_local = '/usr/bin/kinit',
-        user = UnknownConfigurationMock(),
-        action = ['execute'],
-        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
-    )
     self.assertResourceCalled('Execute', '/usr/hdp/current/spark-client/sbin/start-history-server.sh',
         environment = {'JAVA_HOME': u'/usr/jdk64/jdk1.7.0_45'},
         not_if = 'ls /var/run/spark/spark-spark-org.apache.spark.deploy.history.HistoryServer-1.pid >/dev/null 2>&1 && ps -p `cat /var/run/spark/spark-spark-org.apache.spark.deploy.history.HistoryServer-1.pid` >/dev/null 2>&1',
@@ -150,26 +128,16 @@ class TestJobHistoryServer(RMFTestCase):
         group = 'hadoop',
         recursive = True,
     )
-    self.assertResourceCalled('HdfsResource', '/user/spark',
+    self.assertResourceCalled('HdfsDirectory', '/user/spark',
         security_enabled = False,
-        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
         keytab = UnknownConfigurationMock(),
+        conf_dir = '/usr/hdp/current/hadoop-client/conf',
+        hdfs_user = 'hdfs',
         kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        owner = 'spark',
-        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
-        type = 'directory',
-        action = ['create_on_execute'],
         mode = 0775,
-    )
-    self.assertResourceCalled('HdfsResource', None,
-        security_enabled = False,
-        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
-        keytab = UnknownConfigurationMock(),
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        action = ['execute'],
-        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+        owner = 'spark',
+        bin_dir = '/usr/hdp/current/hadoop-client/bin',
+        action = ['create'],
     )
     self.assertResourceCalled('PropertiesFile', '/usr/hdp/current/spark-client/conf/spark-defaults.conf',
         key_value_delimiter = ' ',
@@ -207,26 +175,16 @@ class TestJobHistoryServer(RMFTestCase):
         group = 'hadoop',
         recursive = True,
     )
-    self.assertResourceCalled('HdfsResource', '/user/spark',
+    self.assertResourceCalled('HdfsDirectory', '/user/spark',
         security_enabled = True,
-        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
         keytab = UnknownConfigurationMock(),
+        conf_dir = '/usr/hdp/current/hadoop-client/conf',
+        hdfs_user = UnknownConfigurationMock(),
         kinit_path_local = '/usr/bin/kinit',
-        user = UnknownConfigurationMock(),
-        owner = 'spark',
-        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
-        type = 'directory',
-        action = ['create_on_execute'],
         mode = 0775,
-    )
-    self.assertResourceCalled('HdfsResource', None,
-        security_enabled = True,
-        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
-        keytab = UnknownConfigurationMock(),
-        kinit_path_local = '/usr/bin/kinit',
-        user = UnknownConfigurationMock(),
-        action = ['execute'],
-        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+        owner = 'spark',
+        bin_dir = '/usr/hdp/current/hadoop-client/bin',
+        action = ['create'],
     )
     self.assertResourceCalled('PropertiesFile', '/usr/hdp/current/spark-client/conf/spark-defaults.conf',
         key_value_delimiter = ' ',
@@ -271,28 +229,6 @@ class TestJobHistoryServer(RMFTestCase):
                        mocks_dict = mocks_dict)
 
     self.assertResourceCalled('Execute', 'hdp-select set spark-historyserver {0}'.format(version))
-    self.assertResourceCalled('HdfsResource', 'hdfs:///hdp/apps/2.2.0.0/tez//tez.tar.gz',
-        security_enabled = False,
-        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
-        keytab = UnknownConfigurationMock(),
-        source = '/usr/hdp/current/tez-client/lib/tez.tar.gz',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        owner = 'hdfs',
-        group = 'hadoop',
-        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
-        type = 'file',
-        action = ['create_on_execute'],
-    )
-    self.assertResourceCalled('HdfsResource', None,
-        security_enabled = False,
-        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
-        keytab = UnknownConfigurationMock(),
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        action = ['execute'],
-        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
-    )
     self.assertNoMoreResources()
 
     self.assertEquals(2, mocks_dict['call'].call_count)
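
Net effect of the revert in this file: /user/spark is created through a single HdfsDirectory resource rather than an HdfsResource pair, and the tez tarball upload disappears from the history server's start and upgrade paths. A minimal sketch of the restored directory setup, with values taken from the non-secured assertion:

  from resource_management.libraries.resources.hdfs_directory import HdfsDirectory

  # Restored setup: one resource creates /user/spark for the spark user.
  HdfsDirectory('/user/spark',
                action=['create'],
                owner='spark',
                mode=0775,
                hdfs_user='hdfs',
                conf_dir='/usr/hdp/current/hadoop-client/conf',
                bin_dir='/usr/hdp/current/hadoop-client/bin')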


[2/8] ambari git commit: Revert "AMBARI-11034. Implement HdfsResouces and it's usage (aonishuk)"

Posted by jo...@apache.org.
Revert "AMBARI-11034. Implement HdfsResouces and it's usage (aonishuk)"

This reverts commit bf9301247db36a504bc02a39ab8cc92fcca10a52.


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/d2ebd9c9
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/d2ebd9c9
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/d2ebd9c9

Branch: refs/heads/trunk
Commit: d2ebd9c9ea9414a293ec73f416e9aac40c9ad48a
Parents: 714838d
Author: Jonathan Hurley <jh...@hortonworks.com>
Authored: Mon May 11 15:20:26 2015 -0400
Committer: Jonathan Hurley <jh...@hortonworks.com>
Committed: Mon May 11 15:30:02 2015 -0400

----------------------------------------------------------------------
 .../common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/params.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/d2ebd9c9/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/params.py
index 7a3401e..5f3f5f6 100644
--- a/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/params.py
@@ -16,7 +16,7 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 See the License for the specific language governing permissions and
 limitations under the License.
 
-"""
+"""HBASE/0.96.0.2.0/package/scripts/params_linux.py
 from resource_management.libraries.functions import conf_select
 from resource_management import *
 from resource_management.libraries.functions import format


[8/8] ambari git commit: Revert "AMBARI-11034. Implement HdfsResouces and it's usage (aonishuk)"

Posted by jo...@apache.org.
Revert "AMBARI-11034. Implement HdfsResouces and it's usage (aonishuk)"

This reverts commit 6e8dce44336c0afaa34a5aefbc500a116b4b91e8.


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/e833066e
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/e833066e
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/e833066e

Branch: refs/heads/trunk
Commit: e833066e72050bed2b34cbdb00d70167a7c09835
Parents: 20161e6
Author: Jonathan Hurley <jh...@hortonworks.com>
Authored: Mon May 11 15:20:41 2015 -0400
Committer: Jonathan Hurley <jh...@hortonworks.com>
Committed: Mon May 11 15:30:03 2015 -0400

----------------------------------------------------------------------
 ambari-agent/pom.xml                            |   7 -
 .../resource_management/TestContentSources.py   |   6 +-
 .../resource_management/TestCopyFromLocal.py    |  68 +++
 .../TestPropertiesFileResource.py               |  10 +-
 .../TestRepositoryResource.py                   |   6 +-
 .../TestXmlConfigResource.py                    |   8 +-
 .../python/resource_management/core/source.py   |  11 +-
 .../libraries/functions/__init__.py             |   1 +
 .../libraries/functions/version.py              |  29 +-
 .../libraries/providers/__init__.py             |   3 +-
 .../libraries/providers/copy_from_local.py      |  94 ++++
 .../libraries/providers/hdfs_directory.py       | 112 +++++
 .../libraries/providers/hdfs_resource.py        | 111 -----
 .../libraries/resources/__init__.py             |   3 +-
 .../libraries/resources/copy_from_local.py      |  41 ++
 .../libraries/resources/hdfs_directory.py       |  45 ++
 .../libraries/resources/hdfs_resource.py        |  76 ----
 .../libraries/script/script.py                  |   2 -
 .../1.6.1.2.2.0/package/scripts/accumulo.py     |   6 +-
 .../package/scripts/accumulo_configuration.py   |  14 +-
 .../1.6.1.2.2.0/package/scripts/params.py       |  19 +-
 .../0.1.0/package/scripts/hbase.py              |  14 +-
 .../0.1.0/package/scripts/params.py             |  16 +-
 .../FALCON/0.5.0.2.1/package/scripts/falcon.py  |  23 +-
 .../0.5.0.2.1/package/scripts/params_linux.py   |  20 +-
 .../HBASE/0.96.0.2.0/package/scripts/hbase.py   |  14 +-
 .../0.96.0.2.0/package/scripts/params_linux.py  |  21 +-
 .../0.96.0.2.0/package/scripts/service_check.py |   1 +
 .../package/files/fast-hdfs-resource.jar        | Bin 19285282 -> 0 bytes
 .../HDFS/2.1.0.2.0/package/scripts/hdfs.py      |   5 -
 .../2.1.0.2.0/package/scripts/hdfs_namenode.py  |  15 +-
 .../package/scripts/hdfs_nfsgateway.py          |   1 +
 .../2.1.0.2.0/package/scripts/hdfs_snamenode.py |   1 +
 .../2.1.0.2.0/package/scripts/params_linux.py   |  20 +-
 .../2.1.0.2.0/package/scripts/service_check.py  |  53 ++-
 .../0.12.0.2.0/package/files/templetonSmoke.sh  |  22 +-
 .../HIVE/0.12.0.2.0/package/scripts/hive.py     | 116 ++---
 .../0.12.0.2.0/package/scripts/hive_server.py   |  13 +-
 .../0.12.0.2.0/package/scripts/params_linux.py  |  63 +--
 .../HIVE/0.12.0.2.0/package/scripts/webhcat.py  |  71 +++
 .../package/scripts/webhcat_service_check.py    |  28 +-
 .../package/templates/templeton_smoke.pig.j2    |  24 -
 .../MAHOUT/1.0.0.2.3/package/scripts/params.py  |  17 +-
 .../1.0.0.2.3/package/scripts/service_check.py  |  49 +-
 .../4.0.0.2.0/package/files/oozieSmoke2.sh      |  52 ++-
 .../files/prepareOozieHdfsDirectories.sh        |  45 --
 .../OOZIE/4.0.0.2.0/package/scripts/oozie.py    |   6 +-
 .../package/scripts/oozie_server_upgrade.py     |  16 +-
 .../4.0.0.2.0/package/scripts/params_linux.py   |  22 +-
 .../4.0.0.2.0/package/scripts/service_check.py  |  38 +-
 .../0.12.0.2.0/package/scripts/params_linux.py  |  25 +-
 .../0.12.0.2.0/package/scripts/service_check.py |  67 ++-
 .../package/scripts/job_history_server.py       |  10 +-
 .../SPARK/1.2.0.2.2/package/scripts/params.py   |  21 +-
 .../1.2.0.2.2/package/scripts/setup_spark.py    |   6 +-
 .../1.2.0.2.2/package/scripts/spark_service.py  |   9 +-
 .../STORM/0.9.1.2.1/configuration/storm-env.xml |  12 +-
 .../0.4.0.2.1/package/scripts/params_linux.py   |  25 +-
 .../0.4.0.2.1/package/scripts/service_check.py  |  53 ++-
 .../2.1.0.2.0/package/scripts/historyserver.py  |  25 +-
 .../2.1.0.2.0/package/scripts/install_jars.py   |  68 +--
 .../package/scripts/mapred_service_check.py     |  27 +-
 .../2.1.0.2.0/package/scripts/params_linux.py   |  37 +-
 .../package/scripts/resourcemanager.py          |  12 +-
 .../2.1.0.2.0/package/scripts/service_check.py  |   2 +-
 .../YARN/2.1.0.2.0/package/scripts/yarn.py      |  38 +-
 .../stacks/2.0.6/HBASE/test_hbase_master.py     | 192 ++++----
 .../2.0.6/HBASE/test_hbase_regionserver.py      |  91 ++++
 .../python/stacks/2.0.6/HDFS/test_datanode.py   |   6 -
 .../stacks/2.0.6/HDFS/test_journalnode.py       |   6 -
 .../python/stacks/2.0.6/HDFS/test_namenode.py   | 448 +++++++++----------
 .../python/stacks/2.0.6/HDFS/test_nfsgateway.py |   6 -
 .../stacks/2.0.6/HDFS/test_service_check.py     |  59 ++-
 .../python/stacks/2.0.6/HDFS/test_snamenode.py  |   6 -
 .../test/python/stacks/2.0.6/HDFS/test_zkfc.py  |  12 -
 .../stacks/2.0.6/HIVE/test_hive_server.py       | 328 +++++---------
 .../2.0.6/HIVE/test_hive_service_check.py       | 106 +----
 .../stacks/2.0.6/HIVE/test_webhcat_server.py    | 134 ++++++
 .../stacks/2.0.6/OOZIE/test_oozie_server.py     | 177 ++------
 .../stacks/2.0.6/OOZIE/test_service_check.py    |  45 +-
 .../stacks/2.0.6/PIG/test_pig_service_check.py  |  84 +---
 .../stacks/2.0.6/YARN/test_historyserver.py     | 281 ++++++------
 .../stacks/2.0.6/YARN/test_mapreduce2_client.py |   3 -
 .../2.0.6/YARN/test_mapreduce2_service_check.py |  64 ++-
 .../stacks/2.0.6/YARN/test_nodemanager.py       | 132 ++++++
 .../stacks/2.0.6/YARN/test_resourcemanager.py   |  39 +-
 .../stacks/2.0.6/YARN/test_yarn_client.py       |   2 -
 .../python/stacks/2.0.6/configs/default.json    |  13 +-
 .../python/stacks/2.0.6/configs/secured.json    |  13 +-
 .../stacks/2.1/FALCON/test_falcon_server.py     |  47 +-
 .../stacks/2.1/FALCON/test_service_check.py     |   2 +-
 .../python/stacks/2.1/TEZ/test_service_check.py | 100 +++--
 .../stacks/2.2/PIG/test_pig_service_check.py    | 121 ++---
 .../stacks/2.2/SPARK/test_job_history_server.py |  88 +---
 .../test/python/stacks/2.2/configs/default.json |  13 +-
 .../test/python/stacks/2.2/configs/secured.json |  13 +-
 .../2.3/MAHOUT/test_mahout_service_check.py     |  63 ++-
 .../dependency-reduced-pom.xml                  |  42 --
 contrib/fast-hdfs-resource/pom.xml              |  86 ----
 .../fast-hdfs-resource/resources/example.json   |  57 ---
 .../resources/test_perfomance.sh                |  46 --
 .../ambari/fast_hdfs_resource/Resource.java     | 295 ------------
 .../ambari/fast_hdfs_resource/Runner.java       |  93 ----
 103 files changed, 2144 insertions(+), 3064 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-agent/pom.xml
----------------------------------------------------------------------
diff --git a/ambari-agent/pom.xml b/ambari-agent/pom.xml
index b807ba9..4dde1d9 100644
--- a/ambari-agent/pom.xml
+++ b/ambari-agent/pom.xml
@@ -426,12 +426,6 @@
                 </source>
               </sources>
             </mapping>
-            <mapping>
-              <directory>/var/lib/ambari-agent/lib</directory>
-              <filemode>755</filemode>
-              <username>root</username>
-              <groupname>root</groupname>
-            </mapping>
           </mappings>
         </configuration>
       </plugin>
@@ -557,7 +551,6 @@
                 <path>/var/lib/${project.artifactId}/data/tmp</path>
                 <path>/var/lib/${project.artifactId}/keys</path>
                 <path>${package.log.dir}</path>
-                <path>/var/lib/${project.artifactId}/lib</path>
               </paths>
               <mapper>
                 <type>perm</type>

http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-agent/src/test/python/resource_management/TestContentSources.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/python/resource_management/TestContentSources.py b/ambari-agent/src/test/python/resource_management/TestContentSources.py
index c144cf9..d35ec9a 100644
--- a/ambari-agent/src/test/python/resource_management/TestContentSources.py
+++ b/ambari-agent/src/test/python/resource_management/TestContentSources.py
@@ -221,7 +221,7 @@ class TestContentSources(TestCase):
       content = template.get_content()
     self.assertEqual(open_mock.call_count, 1)
 
-    self.assertEqual(u'test template content', content)
+    self.assertEqual(u'test template content\n', content)
     open_mock.assert_called_with('/absolute/path/test.j2', 'rb')
     self.assertEqual(getmtime_mock.call_count, 1)
     getmtime_mock.assert_called_with('/absolute/path/test.j2')
@@ -234,7 +234,7 @@ class TestContentSources(TestCase):
       template = InlineTemplate("{{test_arg1}} template content", [], test_arg1 = "test")
       content = template.get_content()
 
-    self.assertEqual(u'test template content', content)
+    self.assertEqual(u'test template content\n', content)
 
   def test_template_imports(self):
     """
@@ -250,4 +250,4 @@ class TestContentSources(TestCase):
     with Environment("/base") as env:
       template = InlineTemplate("{{test_arg1}} template content {{os.path.join(path[0],path[1])}}", [os], test_arg1 = "test", path = ["/one","two"])
       content = template.get_content()
-    self.assertEqual(u'test template content /one/two', content)
+    self.assertEqual(u'test template content /one/two\n', content)

http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-agent/src/test/python/resource_management/TestCopyFromLocal.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/python/resource_management/TestCopyFromLocal.py b/ambari-agent/src/test/python/resource_management/TestCopyFromLocal.py
new file mode 100644
index 0000000..1ec1858
--- /dev/null
+++ b/ambari-agent/src/test/python/resource_management/TestCopyFromLocal.py
@@ -0,0 +1,68 @@
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+   http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+
+from unittest import TestCase
+from mock.mock import patch, MagicMock
+from resource_management import *
+from resource_management.core import shell
+
+@patch.object(shell, "call", new = MagicMock(return_value=(1, "")))
+@patch.object(System, "os_family", new = 'redhat')
+class TestCopyFromLocal(TestCase):
+
+  @patch("resource_management.libraries.providers.execute_hadoop.ExecuteHadoopProvider")
+  def test_run_default_args(self, execute_hadoop_mock):
+    with Environment() as env:
+      CopyFromLocal('/user/testdir/*.files',
+        owner='user1',
+        dest_dir='/apps/test/',
+        kinnit_if_needed='',
+        hdfs_user='hdfs'
+      )
+      self.assertEqual(execute_hadoop_mock.call_count, 2)
+      call_arg_list = execute_hadoop_mock.call_args_list
+      self.assertEqual('fs -copyFromLocal /user/testdir/*.files /apps/test/',
+                       call_arg_list[0][0][0].command)
+      print call_arg_list[0][0][0].arguments
+      self.assertEquals({'not_if': "ambari-sudo.sh su user1 -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]PATH=$PATH:/usr/bin hadoop fs -ls /apps/test//*.files'", 'bin_dir': '/usr/bin', 'user': 'user1', 'conf_dir': '/etc/hadoop/conf'},
+                        call_arg_list[0][0][0].arguments)
+      self.assertEquals('fs -chown user1 /apps/test//*.files', call_arg_list[1][0][0].command)
+      self.assertEquals({'user': 'hdfs', 'bin_dir': '/usr/bin', 'conf_dir': '/etc/hadoop/conf'}, call_arg_list[1][0][0].arguments)
+
+
+  @patch("resource_management.libraries.providers.execute_hadoop.ExecuteHadoopProvider")
+  def test_run_with_chmod(self, execute_hadoop_mock):
+    with Environment() as env:
+      CopyFromLocal('/user/testdir/*.files',
+        mode=0655,
+        owner='user1',
+        group='hdfs',
+        dest_dir='/apps/test/',
+        kinnit_if_needed='',
+        hdfs_user='hdfs'
+      )
+      self.assertEqual(execute_hadoop_mock.call_count, 3)
+      call_arg_list = execute_hadoop_mock.call_args_list
+      self.assertEqual('fs -copyFromLocal /user/testdir/*.files /apps/test/',
+                       call_arg_list[0][0][0].command)
+      self.assertEquals({'not_if': "ambari-sudo.sh su user1 -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]PATH=$PATH:/usr/bin hadoop fs -ls /apps/test//*.files'", 'bin_dir': '/usr/bin', 'user': 'user1', 'conf_dir': '/etc/hadoop/conf'},
+                        call_arg_list[0][0][0].arguments)
+      self.assertEquals('fs -chown user1:hdfs /apps/test//*.files', call_arg_list[1][0][0].command)
+      self.assertEquals({'user': 'hdfs', 'bin_dir': '/usr/bin', 'conf_dir': '/etc/hadoop/conf'}, call_arg_list[1][0][0].arguments)
+
+

http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-agent/src/test/python/resource_management/TestPropertiesFileResource.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/python/resource_management/TestPropertiesFileResource.py b/ambari-agent/src/test/python/resource_management/TestPropertiesFileResource.py
index 5c97380..bb91159 100644
--- a/ambari-agent/src/test/python/resource_management/TestPropertiesFileResource.py
+++ b/ambari-agent/src/test/python/resource_management/TestPropertiesFileResource.py
@@ -65,7 +65,7 @@ class TestPropertiesFIleResource(TestCase):
                      properties={}
       )
 
-    create_file_mock.assert_called_with('/somewhere_in_system/one_file.properties', u'# Generated by Apache Ambari. Today is Wednesday\n    \n    ', encoding=None)
+    create_file_mock.assert_called_with('/somewhere_in_system/one_file.properties', u'# Generated by Apache Ambari. Today is Wednesday\n    \n    \n', encoding=None)
     ensure_mock.assert_called()
 
 
@@ -98,7 +98,7 @@ class TestPropertiesFIleResource(TestCase):
                      properties={},
       )
 
-    create_file_mock.assert_called_with('/dir/and/dir/file.txt', u'# Generated by Apache Ambari. Some other day\n    \n    ', encoding=None)
+    create_file_mock.assert_called_with('/dir/and/dir/file.txt', u'# Generated by Apache Ambari. Some other day\n    \n    \n', encoding=None)
     ensure_mock.assert_called()
 
 
@@ -131,7 +131,7 @@ class TestPropertiesFIleResource(TestCase):
                      properties={'property1': 'value1'},
       )
 
-    create_file_mock.assert_called_with('/dir/new_file', u'# Generated by Apache Ambari. 777\n    \nproperty1=value1\n    ', encoding=None)
+    create_file_mock.assert_called_with('/dir/new_file', u'# Generated by Apache Ambari. 777\n    \nproperty1=value1\n    \n', encoding=None)
     ensure_mock.assert_called()
 
 
@@ -169,7 +169,7 @@ class TestPropertiesFIleResource(TestCase):
                      },
       )
 
-    create_file_mock.assert_called_with('/dir/new_file', u"# Generated by Apache Ambari. 777\n    \n=\nprop.1='.'yyyy-MM-dd-HH\nprop.2=INFO, openjpa\nprop.3=%d{ISO8601} %5p %c{1}:%L - %m%n\nprop.4=${oozie.log.dir}/oozie.log\nprop.empty=\n    ", encoding=None)
+    create_file_mock.assert_called_with('/dir/new_file', u"# Generated by Apache Ambari. 777\n    \n=\nprop.1='.'yyyy-MM-dd-HH\nprop.2=INFO, openjpa\nprop.3=%d{ISO8601} %5p %c{1}:%L - %m%n\nprop.4=${oozie.log.dir}/oozie.log\nprop.empty=\n    \n", encoding=None)
     ensure_mock.assert_called()
 
 
@@ -206,5 +206,5 @@ class TestPropertiesFIleResource(TestCase):
       )
 
     read_file_mock.assert_called()
-    create_file_mock.assert_called_with('/dir1/new_file', u'# Generated by Apache Ambari. 777\n    \nproperty_1=value1\n    ', encoding=None)
+    create_file_mock.assert_called_with('/dir1/new_file', u'# Generated by Apache Ambari. 777\n    \nproperty_1=value1\n    \n', encoding=None)
     ensure_mock.assert_called()

http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-agent/src/test/python/resource_management/TestRepositoryResource.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/python/resource_management/TestRepositoryResource.py b/ambari-agent/src/test/python/resource_management/TestRepositoryResource.py
index c31cc20..6b94481 100644
--- a/ambari-agent/src/test/python/resource_management/TestRepositoryResource.py
+++ b/ambari-agent/src/test/python/resource_management/TestRepositoryResource.py
@@ -164,7 +164,7 @@ class TestRepositoryResource(TestCase):
       template_content = call_content[1]['content']
       
       self.assertEquals(template_name, '/tmp/1.txt')
-      self.assertEquals(template_content, 'deb http://download.base_url.org/rpm/ a b c')
+      self.assertEquals(template_content, 'deb http://download.base_url.org/rpm/ a b c\n')
       
       copy_item = str(file_mock.call_args_list[1])
       self.assertEqual(copy_item, "call('/etc/apt/sources.list.d/HDP.list', content=StaticFile('/tmp/1.txt'))")
@@ -205,7 +205,7 @@ class TestRepositoryResource(TestCase):
       template_content = call_content[1]['content']
 
       self.assertEquals(template_name, '/tmp/1.txt')
-      self.assertEquals(template_content, 'deb http://download.base_url.org/rpm/ a b c')
+      self.assertEquals(template_content, 'deb http://download.base_url.org/rpm/ a b c\n')
 
       copy_item = str(file_mock.call_args_list[1])
       self.assertEqual(copy_item, "call('/etc/apt/sources.list.d/HDP.list', content=StaticFile('/tmp/1.txt'))")
@@ -239,7 +239,7 @@ class TestRepositoryResource(TestCase):
       template_content = call_content[1]['content']
       
       self.assertEquals(template_name, '/tmp/1.txt')
-      self.assertEquals(template_content, 'deb http://download.base_url.org/rpm/ a b c')
+      self.assertEquals(template_content, 'deb http://download.base_url.org/rpm/ a b c\n')
       
       self.assertEqual(file_mock.call_count, 1)
       self.assertEqual(execute_mock.call_count, 0)

http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-agent/src/test/python/resource_management/TestXmlConfigResource.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/python/resource_management/TestXmlConfigResource.py b/ambari-agent/src/test/python/resource_management/TestXmlConfigResource.py
index 6092717..a7eaae9 100644
--- a/ambari-agent/src/test/python/resource_management/TestXmlConfigResource.py
+++ b/ambari-agent/src/test/python/resource_management/TestXmlConfigResource.py
@@ -62,7 +62,7 @@ class TestXmlConfigResource(TestCase):
                 configuration_attributes={}
                 )
 
-    create_file_mock.assert_called_with('/dir/conf/file.xml', u'<!--Wed 2014-02-->\n    <configuration>\n    \n  </configuration>', encoding='UTF-8')
+    create_file_mock.assert_called_with('/dir/conf/file.xml', u'<!--Wed 2014-02-->\n    <configuration>\n    \n  </configuration>\n', encoding='UTF-8')
 
 
   @patch("resource_management.core.providers.system._ensure_metadata")
@@ -91,7 +91,7 @@ class TestXmlConfigResource(TestCase):
                 configuration_attributes={'attr': {'property1': 'attr_value'}}
                 )
 
-    create_file_mock.assert_called_with('/dir/conf/file.xml', u'<!--Wed 2014-02-->\n    <configuration>\n    \n    <property>\n      <name>property1</name>\n      <value>value1</value>\n      <attr>attr_value</attr>\n    </property>\n    \n  </configuration>', encoding='UTF-8')
+    create_file_mock.assert_called_with('/dir/conf/file.xml', u'<!--Wed 2014-02-->\n    <configuration>\n    \n    <property>\n      <name>property1</name>\n      <value>value1</value>\n      <attr>attr_value</attr>\n    </property>\n    \n  </configuration>\n', encoding='UTF-8')
 
 
   @patch("resource_management.core.providers.system._ensure_metadata")
@@ -144,7 +144,7 @@ class TestXmlConfigResource(TestCase):
                     }
                 })
 
-    create_file_mock.assert_called_with('/dir/conf/file.xml', u'<!--Wed 2014-02-->\n    <configuration>\n    \n    <property>\n      <name></name>\n      <value></value>\n    </property>\n    \n    <property>\n      <name>prop.1</name>\n      <value>&#39;.&#39;yyyy-MM-dd-HH</value>\n      <attr1>x</attr1>\n    </property>\n    \n    <property>\n      <name>prop.2</name>\n      <value>INFO, openjpa</value>\n    </property>\n    \n    <property>\n      <name>prop.3</name>\n      <value>%d{ISO8601} %5p %c{1}:%L - %m%n</value>\n      <attr2>value3</attr2>\n    </property>\n    \n    <property>\n      <name>prop.4</name>\n      <value>${oozie.log.dir}/oozie.log</value>\n      <attr_value_empty></attr_value_empty>\n      <attr2>value4</attr2>\n    </property>\n    \n    <property>\n      <name>prop.empty</name>\n      <value></value>\n      <attr_value_empty></attr_value_empty>\n    </property>\n    \n  </configuration>', encoding='UTF-8')
+    create_file_mock.assert_called_with('/dir/conf/file.xml', u'<!--Wed 2014-02-->\n    <configuration>\n    \n    <property>\n      <name></name>\n      <value></value>\n    </property>\n    \n    <property>\n      <name>prop.1</name>\n      <value>&#39;.&#39;yyyy-MM-dd-HH</value>\n      <attr1>x</attr1>\n    </property>\n    \n    <property>\n      <name>prop.2</name>\n      <value>INFO, openjpa</value>\n    </property>\n    \n    <property>\n      <name>prop.3</name>\n      <value>%d{ISO8601} %5p %c{1}:%L - %m%n</value>\n      <attr2>value3</attr2>\n    </property>\n    \n    <property>\n      <name>prop.4</name>\n      <value>${oozie.log.dir}/oozie.log</value>\n      <attr_value_empty></attr_value_empty>\n      <attr2>value4</attr2>\n    </property>\n    \n    <property>\n      <name>prop.empty</name>\n      <value></value>\n      <attr_value_empty></attr_value_empty>\n    </property>\n    \n  </configuration>\n', encoding='UTF-8')
 
   @patch("resource_management.core.providers.system._ensure_metadata")
   @patch.object(sudo, "create_file")
@@ -177,7 +177,7 @@ class TestXmlConfigResource(TestCase):
                 configuration_attributes={}
                 )
 
-    create_file_mock.assert_called_with('/dir/conf/file.xml', u'<!--Wed 2014-02-->\n    <configuration>\n    \n    <property>\n      <name></name>\n      <value></value>\n    </property>\n    \n    <property>\n      <name>first</name>\n      <value>should be first</value>\n    </property>\n    \n    <property>\n      <name>second</name>\n      <value>should be second</value>\n    </property>\n    \n    <property>\n      <name>third</name>\n      <value>should be third</value>\n    </property>\n    \n    <property>\n      <name>z_last</name>\n      <value>should be last</value>\n    </property>\n    \n  </configuration>', encoding='UTF-8')
+    create_file_mock.assert_called_with('/dir/conf/file.xml', u'<!--Wed 2014-02-->\n    <configuration>\n    \n    <property>\n      <name></name>\n      <value></value>\n    </property>\n    \n    <property>\n      <name>first</name>\n      <value>should be first</value>\n    </property>\n    \n    <property>\n      <name>second</name>\n      <value>should be second</value>\n    </property>\n    \n    <property>\n      <name>third</name>\n      <value>should be third</value>\n    </property>\n    \n    <property>\n      <name>z_last</name>\n      <value>should be last</value>\n    </property>\n    \n  </configuration>\n', encoding='UTF-8')
 
   @patch("resource_management.libraries.providers.xml_config.File")
   @patch.object(sudo, "path_exists")

http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-common/src/main/python/resource_management/core/source.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/core/source.py b/ambari-common/src/main/python/resource_management/core/source.py
index 4eecc7d..9d1fc76 100644
--- a/ambari-common/src/main/python/resource_management/core/source.py
+++ b/ambari-common/src/main/python/resource_management/core/source.py
@@ -141,7 +141,7 @@ else:
       self.context.update(variables)
       
       rendered = self.template.render(self.context)
-      return rendered
+      return rendered + "\n" if not rendered.endswith('\n') else rendered
     
   class InlineTemplate(Template):
     def __init__(self, name, extra_imports=[], **kwargs):
@@ -189,14 +189,9 @@ class DownloadSource(Source):
         opener = urllib2.build_opener()
       
       req = urllib2.Request(self.url)
-      
-      try:
-        web_file = opener.open(req)
-      except urllib2.HTTPError as ex:
-        raise Fail("Failed to download file from {0} due to HTTP error: {1}".format(self.url, str(ex)))
-      
+      web_file = opener.open(req)
       content = web_file.read()
-      
+
       if self.cache:
         with open(filepath, 'w') as fp:
           fp.write(content)
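
Two behavior changes ride along in this hunk: Template.get_content() again guarantees a trailing newline (which is why every expected string in the test diffs above gains a '\n'), and DownloadSource goes back to letting urllib2.HTTPError propagate instead of wrapping it in Fail. A quick check of the newline guarantee, mirroring TestContentSources:

  from resource_management.core.environment import Environment
  from resource_management.core.source import InlineTemplate

  with Environment("/base") as env:
    content = InlineTemplate("{{test_arg1}} template content",
                             [], test_arg1="test").get_content()
  # get_content() appends the newline only when one is not already present.
  assert content == u'test template content\n'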

http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-common/src/main/python/resource_management/libraries/functions/__init__.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/__init__.py b/ambari-common/src/main/python/resource_management/libraries/functions/__init__.py
index ae6e47b..e06d246 100644
--- a/ambari-common/src/main/python/resource_management/libraries/functions/__init__.py
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/__init__.py
@@ -40,6 +40,7 @@ from resource_management.libraries.functions.format_jvm_option import *
 from resource_management.libraries.functions.constants import *
 from resource_management.libraries.functions.get_hdp_version import *
 from resource_management.libraries.functions.get_lzo_packages import *
+from resource_management.libraries.functions.dynamic_variable_interpretation import *
 from resource_management.libraries.functions.setup_ranger_plugin import *
 
 IS_WINDOWS = platform.system() == "Windows"
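
The added import re-exports the tarball helpers through the functions package, so service scripts that use the package-level wildcard import regain them without further changes. Assuming dynamic_variable_interpretation keeps copy_tarballs_to_hdfs public, both import forms resolve again:

  from resource_management.libraries.functions import copy_tarballs_to_hdfs
  from resource_management.libraries.functions.dynamic_variable_interpretation \
      import copy_tarballs_to_hdfs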

http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-common/src/main/python/resource_management/libraries/functions/version.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/version.py b/ambari-common/src/main/python/resource_management/libraries/functions/version.py
index d9f20da..1de6bf8 100644
--- a/ambari-common/src/main/python/resource_management/libraries/functions/version.py
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/version.py
@@ -19,13 +19,7 @@ limitations under the License.
 Ambari Agent
 
 """
-import os
 import re
-from resource_management.core import shell
-from resource_management.core.exceptions import Fail
-from resource_management.libraries.script.config_dictionary import UnknownConfiguration
-
-__all__ = ["format_hdp_stack_version", "compare_versions", "get_hdp_build_version"]
 
 def _normalize(v, desired_segments=0):
   """
@@ -76,25 +70,4 @@ def compare_versions(version1, version2):
   :return: Returns -1 if version1 is before version2, 0 if they are equal, and 1 if version1 is after version2
   """
   max_segments = max(len(version1.split(".")), len(version2.split(".")))
-  return cmp(_normalize(version1, desired_segments=max_segments), _normalize(version2, desired_segments=max_segments))
-
-
-def get_hdp_build_version(hdp_stack_version):
-  """
-  Used to check hdp_stack_version for stacks >= 2.2
-  :param hdp_stack_version: version for stacks >= 2.2
-  :return: checked hdp_version (or UnknownConfiguration for stacks < 2.2)
-  """
-  HDP_SELECT = "/usr/bin/hdp-select"
-  if hdp_stack_version != "" and compare_versions(hdp_stack_version, "2.2.0.0") >= 0 and os.path.exists(HDP_SELECT):
-    code, out = shell.call('{0} status'.format(HDP_SELECT))
-
-    matches = re.findall(r"([\d\.]+\-\d+)", out)
-    hdp_version = matches[0] if matches and len(matches) > 0 else None
-
-    if not hdp_version:
-      raise Fail("Could not parse HDP version from output of hdp-select: %s" % str(out))
-
-    return hdp_version
-  else:
-    return UnknownConfiguration('hdp_version')
\ No newline at end of file
+  return cmp(_normalize(version1, desired_segments=max_segments), _normalize(version2, desired_segments=max_segments))
\ No newline at end of file
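
With get_hdp_build_version gone, version.py is back to pure string comparison: _normalize zero-pads the shorter version so cmp works segment by segment. For instance:

  from resource_management.libraries.functions.version import compare_versions

  compare_versions("2.2", "2.2.0.0")   # 0:  "2.2" is padded to "2.2.0.0"
  compare_versions("2.1.9", "2.2")     # -1: 2.1.x sorts before 2.2
  compare_versions("2.2.1.0", "2.2")   # 1:  extra nonzero segment wins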

http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-common/src/main/python/resource_management/libraries/providers/__init__.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/providers/__init__.py b/ambari-common/src/main/python/resource_management/libraries/providers/__init__.py
index 44e9ca1..34b10a9 100644
--- a/ambari-common/src/main/python/resource_management/libraries/providers/__init__.py
+++ b/ambari-common/src/main/python/resource_management/libraries/providers/__init__.py
@@ -42,7 +42,8 @@ PROVIDERS = dict(
     XmlConfig="resource_management.libraries.providers.xml_config.XmlConfigProvider",
     PropertiesFile="resource_management.libraries.providers.properties_file.PropertiesFileProvider",
     MonitorWebserver="resource_management.libraries.providers.monitor_webserver.MonitorWebserverProvider",
-    HdfsResource="resource_management.libraries.providers.hdfs_resource.HdfsResourceProvider",
+    HdfsDirectory="resource_management.libraries.providers.hdfs_directory.HdfsDirectoryProvider",
+    CopyFromLocal="resource_management.libraries.providers.copy_from_local.CopyFromLocalProvider",
     ModifyPropertiesFile="resource_management.libraries.providers.modify_properties_file.ModifyPropertiesFileProvider"
   ),
 )

http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-common/src/main/python/resource_management/libraries/providers/copy_from_local.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/providers/copy_from_local.py b/ambari-common/src/main/python/resource_management/libraries/providers/copy_from_local.py
new file mode 100644
index 0000000..bd9805c
--- /dev/null
+++ b/ambari-common/src/main/python/resource_management/libraries/providers/copy_from_local.py
@@ -0,0 +1,94 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+Ambari Agent
+
+"""
+
+import os
+from resource_management.libraries.resources.execute_hadoop import ExecuteHadoop
+from resource_management.core.providers import Provider
+from resource_management.libraries.functions.format import format
+from resource_management.core.shell import as_user
+from resource_management.core.resources.system import Execute
+
+class CopyFromLocalProvider(Provider):
+  def action_run(self):
+
+    path = self.resource.path
+    dest_dir = self.resource.dest_dir
+    dest_file = self.resource.dest_file
+    kinnit_if_needed = self.resource.kinnit_if_needed
+    user = self.resource.user   # user to perform commands as. If not provided, default to the owner
+    owner = self.resource.owner
+    group = self.resource.group
+    mode = self.resource.mode
+    hdfs_usr=self.resource.hdfs_user
+    hadoop_conf_path = self.resource.hadoop_conf_dir
+    bin_dir = self.resource.hadoop_bin_dir
+
+
+    if dest_file:
+      copy_cmd = format("fs -copyFromLocal {path} {dest_dir}/{dest_file}")
+      dest_path = dest_dir + dest_file if dest_dir.endswith(os.sep) else dest_dir + os.sep + dest_file
+    else:
+      dest_file_name = os.path.split(path)[1]
+      copy_cmd = format("fs -copyFromLocal {path} {dest_dir}")
+      dest_path = dest_dir + os.sep + dest_file_name
+    # The 'unless' guard (not_if) below must run as the resource user
+    
+    if kinnit_if_needed:
+      Execute(kinnit_if_needed, 
+              user=user if user else owner,
+      )
+    
+    unless_cmd = as_user(format("PATH=$PATH:{bin_dir} hadoop fs -ls {dest_path}"), user if user else owner)
+
+    ExecuteHadoop(copy_cmd,
+                  not_if=unless_cmd,
+                  user=user if user else owner,
+                  bin_dir=bin_dir,
+                  conf_dir=hadoop_conf_path
+                  )
+
+    if not owner:
+      chown = None
+    else:
+      if not group:
+        chown = owner
+      else:
+        chown = format('{owner}:{group}')
+
+    if chown:
+      chown_cmd = format("fs -chown {chown} {dest_path}")
+
+      ExecuteHadoop(chown_cmd,
+                    user=hdfs_usr,
+                    bin_dir=bin_dir,
+                    conf_dir=hadoop_conf_path)
+    pass
+
+    if mode:
+      dir_mode = oct(mode)[1:]
+      chmod_cmd = format('fs -chmod {dir_mode} {dest_path}')
+
+      ExecuteHadoop(chmod_cmd,
+                    user=hdfs_usr,
+                    bin_dir=bin_dir,
+                    conf_dir=hadoop_conf_path)
+    pass
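
Usage-wise, the restored provider is driven by the CopyFromLocal resource (kinnit_if_needed, misspelling included, is the resource's actual parameter name). A sketch with illustrative paths: the provider runs fs -copyFromLocal guarded by an fs -ls not_if, chowns as the hdfs user, and chmods only when mode is set:

  from resource_management.libraries.resources.copy_from_local import CopyFromLocal

  CopyFromLocal('/usr/lib/tez/tez-mapreduce-examples.jar',  # local source path
                dest_dir='/apps/tez/',      # HDFS destination directory
                owner='tez',                # fs -chown target after the copy
                group='hadoop',
                mode=0755,                  # presence triggers the fs -chmod step
                kinnit_if_needed='',        # kinit command on secure clusters
                hdfs_user='hdfs')           # user that runs chown/chmod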

http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-common/src/main/python/resource_management/libraries/providers/hdfs_directory.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/providers/hdfs_directory.py b/ambari-common/src/main/python/resource_management/libraries/providers/hdfs_directory.py
new file mode 100644
index 0000000..9fa2de7
--- /dev/null
+++ b/ambari-common/src/main/python/resource_management/libraries/providers/hdfs_directory.py
@@ -0,0 +1,112 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+Ambari Agent
+
+"""
+import os
+
+from resource_management import *
+directories_list = [] #directories list for mkdir
+chmod_map = {} #(mode,recursive):dir_list map
+chown_map = {} #(owner,group,recursive):dir_list map
+class HdfsDirectoryProvider(Provider):
+  def action_create_delayed(self):
+    global directories_list
+    global chmod_map
+    global chown_map
+
+    if not self.resource.dir_name:
+      return
+
+    dir_name = self.resource.dir_name
+    dir_owner = self.resource.owner
+    dir_group = self.resource.group
+    dir_mode = oct(self.resource.mode)[1:] if self.resource.mode else None
+    directories_list.append(self.resource.dir_name)
+
+    recursive_chown_str = "-R" if self.resource.recursive_chown else ""
+    recursive_chmod_str = "-R" if self.resource.recursive_chmod else ""
+    # grouping directories by mode/owner/group to modify them in one 'chXXX' call
+    if dir_mode:
+      chmod_key = (dir_mode,recursive_chmod_str)
+      if chmod_map.has_key(chmod_key):
+        chmod_map[chmod_key].append(dir_name)
+      else:
+        chmod_map[chmod_key] = [dir_name]
+
+    if dir_owner:
+      owner_key = (dir_owner,dir_group,recursive_chown_str)
+      if chown_map.has_key(owner_key):
+        chown_map[owner_key].append(dir_name)
+      else:
+        chown_map[owner_key] = [dir_name]
+
+  def action_create(self):
+    global directories_list
+    global chmod_map
+    global chown_map
+
+    self.action_create_delayed()
+
+    hdp_conf_dir = self.resource.conf_dir
+    hdp_hdfs_user = self.resource.hdfs_user
+    secured = self.resource.security_enabled
+    keytab_file = self.resource.keytab
+    kinit_path = self.resource.kinit_path_local
+    bin_dir = self.resource.bin_dir
+
+    chmod_commands = []
+    chown_commands = []
+
+    for chmod_key, chmod_dirs in chmod_map.items():
+      mode = chmod_key[0]
+      recursive = chmod_key[1]
+      chmod_dirs_str = ' '.join(chmod_dirs)
+      chmod_commands.append(format("hadoop --config {hdp_conf_dir} fs -chmod {recursive} {mode} {chmod_dirs_str}"))
+
+    for chown_key, chown_dirs in chown_map.items():
+      owner = chown_key[0]
+      group = chown_key[1]
+      recursive = chown_key[2]
+      chown_dirs_str = ' '.join(chown_dirs)
+      if owner:
+        chown = owner
+        if group:
+          chown = format("{owner}:{group}")
+        chown_commands.append(format("hadoop --config {hdp_conf_dir} fs -chown {recursive} {chown} {chown_dirs_str}"))
+
+    if secured:
+        Execute(format("{kinit_path} -kt {keytab_file} {hdfs_principal_name}"),
+                user=hdp_hdfs_user)
+    #create all directories in one 'mkdir' call
+    dir_list_str = ' '.join(directories_list)
+    #for hadoop 2 we need to specify -p to create directories recursively
+    parent_flag = '-p'
+
+    Execute(format('hadoop --config {hdp_conf_dir} fs -mkdir {parent_flag} {dir_list_str} && {chmod_cmd} && {chown_cmd}',
+                   chmod_cmd=' && '.join(chmod_commands),
+                   chown_cmd=' && '.join(chown_commands)),
+            user=hdp_hdfs_user,
+            path=bin_dir,
+            not_if=as_user(format("hadoop --config {hdp_conf_dir} fs -ls {dir_list_str}"), hdp_hdfs_user)
+    )
+
+    directories_list[:] = []
+    chmod_map.clear()
+    chown_map.clear()
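
The batching is the point of this provider: create_delayed only records a directory in the module-level maps, and the final create flushes everything as one mkdir -p plus one chmod/chown per (mode, owner) group. A sketch of the intended pattern (directory names illustrative; conf_dir, hdfs_user and kerberos arguments, normally curried in via params, are omitted):

  from resource_management.libraries.resources.hdfs_directory import HdfsDirectory

  # Queue directories; nothing touches HDFS yet.
  HdfsDirectory('/user/oozie', owner='oozie', mode=0775, action=['create_delayed'])
  HdfsDirectory('/user/hcat', owner='hcat', mode=0755, action=['create_delayed'])
  # Flush the queue with a single guarded hadoop invocation.
  HdfsDirectory(None, action=['create'])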

http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-common/src/main/python/resource_management/libraries/providers/hdfs_resource.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/providers/hdfs_resource.py b/ambari-common/src/main/python/resource_management/libraries/providers/hdfs_resource.py
deleted file mode 100644
index 1d711f5..0000000
--- a/ambari-common/src/main/python/resource_management/libraries/providers/hdfs_resource.py
+++ /dev/null
@@ -1,111 +0,0 @@
-# !/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-Ambari Agent
-
-"""
-import json
-from resource_management import *
-
-JSON_PATH = '/var/lib/ambari-agent/data/hdfs_resources.json'
-JAR_PATH = '/var/lib/ambari-agent/lib/fast-hdfs-resource.jar'
-
-RESOURCE_TO_JSON_FIELDS = {
-  'target': 'target',
-  'type': 'type',
-  'action': 'action',
-  'source': 'source',
-  'owner': 'owner',
-  'group': 'group',
-  'mode': 'mode',
-  'recursive_chown': 'recursiveChown',
-  'recursive_chmod': 'recursiveChmod',
-  'change_permissions_for_parents': 'changePermissionforParents'
-}
-
-
-class HdfsResourceProvider(Provider):
-  def action_delayed(self, action_name):
-    resource = {}
-    env = Environment.get_instance()
-    if not 'hdfs_files' in env.config:
-      env.config['hdfs_files'] = []
-
-    # Check required parameters
-    if not self.resource.type or not self.resource.action:
-      raise Fail("Resource parameter type or action is not set.")
-
-    # Put values in dictionary-resource
-    for field_name, json_field_name in RESOURCE_TO_JSON_FIELDS.iteritems():
-      if field_name == 'action':
-        resource[json_field_name] = action_name
-      elif field_name == 'mode' and self.resource.mode:
-        resource[json_field_name] = oct(self.resource.mode)[1:]
-      elif getattr(self.resource, field_name):
-        resource[json_field_name] = getattr(self.resource, field_name)
-
-    # Add resource to create
-    env.config['hdfs_files'].append(resource)
-
-  def action_create_on_execute(self):
-    self.action_delayed("create")
-
-  def action_delete_on_execute(self):
-    self.action_delayed("delete")
-
-  def action_execute(self):
-    env = Environment.get_instance()
-
-    # Check required parameters
-    if not self.resource.user:
-      raise Fail("Resource parameter 'user' is not set.")
-
-    if not 'hdfs_files' in env.config or not env.config['hdfs_files']:
-      raise Fail("No resources to create. Please perform create_delayed"
-                 " or delete_delayed before doing execute action.")
-
-    hadoop_bin_dir = self.resource.hadoop_bin_dir
-    hadoop_conf_dir = self.resource.hadoop_conf_dir
-    user = self.resource.user
-    security_enabled = self.resource.security_enabled
-    keytab_file = self.resource.keytab
-    kinit_path = self.resource.kinit_path_local
-    logoutput = self.resource.logoutput
-    jar_path=JAR_PATH
-    json_path=JSON_PATH
-
-    if security_enabled:
-      Execute(format("{kinit_path} -kt {keytab_file} {hdfs_principal_name}"),
-              user=user
-      )
-
-    # Write json file to disk
-    File(JSON_PATH,
-         owner = user,
-         content = json.dumps(env.config['hdfs_files'])
-    )
-
-    # Execute jar to create/delete resources in hadoop
-    Execute(format("hadoop --config {hadoop_conf_dir} jar {jar_path} {json_path}"),
-            user=user,
-            path=[hadoop_bin_dir],
-            logoutput=logoutput,
-    )
-
-    # Clean
-    env.config['hdfs_files'] = []

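For reference, after a couple of delayed calls the provider removed above would
have queued entries like the following in env.config['hdfs_files'] before
json.dumps()-ing them to /var/lib/ambari-agent/data/hdfs_resources.json (paths
are hypothetical; the keys come from RESOURCE_TO_JSON_FIELDS):

    hdfs_files = [
      {'target': '/apps/example', 'type': 'directory',
       'action': 'create', 'owner': 'hdfs', 'mode': '755'},
      {'target': '/tmp/example-file', 'type': 'file',
       'action': 'delete'},
    ]

A single "hadoop jar fast-hdfs-resource.jar hdfs_resources.json" run then
applied the whole batch, which is the connection-setup saving this resource was
built around.
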
http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-common/src/main/python/resource_management/libraries/resources/__init__.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/resources/__init__.py b/ambari-common/src/main/python/resource_management/libraries/resources/__init__.py
index 524292f..596c2e2 100644
--- a/ambari-common/src/main/python/resource_management/libraries/resources/__init__.py
+++ b/ambari-common/src/main/python/resource_management/libraries/resources/__init__.py
@@ -26,6 +26,7 @@ from resource_management.libraries.resources.xml_config import *
 from resource_management.libraries.resources.properties_file import *
 from resource_management.libraries.resources.repository import *
 from resource_management.libraries.resources.monitor_webserver import *
-from resource_management.libraries.resources.hdfs_resource import *
+from resource_management.libraries.resources.hdfs_directory import *
+from resource_management.libraries.resources.copy_from_local import *
 from resource_management.libraries.resources.msi import *
 from resource_management.libraries.resources.modify_properties_file import *
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-common/src/main/python/resource_management/libraries/resources/copy_from_local.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/resources/copy_from_local.py b/ambari-common/src/main/python/resource_management/libraries/resources/copy_from_local.py
new file mode 100644
index 0000000..42050a3
--- /dev/null
+++ b/ambari-common/src/main/python/resource_management/libraries/resources/copy_from_local.py
@@ -0,0 +1,41 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+Ambari Agent
+
+"""
+
+__all__ = ["CopyFromLocal"]
+from resource_management.core.base import Resource, ForcedListArgument, ResourceArgument, BooleanArgument
+
+class CopyFromLocal(Resource):
+  action = ForcedListArgument(default="run")
+
+  path = ResourceArgument(default=lambda obj: obj.name)
+  dest_dir = ResourceArgument(required=True)
+  dest_file = ResourceArgument()
+  owner = ResourceArgument(required=True)  # file owner
+  group = ResourceArgument()               # file group owner
+  mode = ResourceArgument()                # file ACL mode
+  kinnit_if_needed = ResourceArgument(default='')
+  user = ResourceArgument()                # user to run commands as; defaults to the owner if not provided
+  hadoop_conf_dir = ResourceArgument(default='/etc/hadoop/conf')
+  hdfs_user = ResourceArgument(default='hdfs')
+  hadoop_bin_dir = ResourceArgument(default='/usr/bin')
+
+  actions = Resource.actions + ["run"]

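A minimal usage sketch for the resource above, with hypothetical paths (the
keyword names match the arguments defined in the class):

    CopyFromLocal("/usr/lib/example/example.jar",
                  dest_dir="/apps/example",
                  owner="example_user",
                  mode=0644,
                  hdfs_user="hdfs",
                  hadoop_conf_dir="/etc/hadoop/conf",
                  hadoop_bin_dir="/usr/bin")

The resource name doubles as the local source path (path defaults to obj.name),
and the default action "run" performs the copy immediately rather than batching
it the way the HdfsDirectory resource does.
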
http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-common/src/main/python/resource_management/libraries/resources/hdfs_directory.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/resources/hdfs_directory.py b/ambari-common/src/main/python/resource_management/libraries/resources/hdfs_directory.py
new file mode 100644
index 0000000..7888cd8
--- /dev/null
+++ b/ambari-common/src/main/python/resource_management/libraries/resources/hdfs_directory.py
@@ -0,0 +1,45 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+Ambari Agent
+
+"""
+
+__all__ = ["HdfsDirectory"]
+from resource_management.core.base import Resource, ForcedListArgument, ResourceArgument, BooleanArgument
+
+class HdfsDirectory(Resource):
+  action = ForcedListArgument()
+
+  dir_name = ResourceArgument(default=lambda obj: obj.name)
+  owner = ResourceArgument()
+  group = ResourceArgument()
+  mode = ResourceArgument()
+  recursive_chown = BooleanArgument(default=False)
+  recursive_chmod = BooleanArgument(default=False)
+
+  conf_dir = ResourceArgument()
+  security_enabled = BooleanArgument(default=False)
+  keytab = ResourceArgument()
+  kinit_path_local = ResourceArgument()
+  hdfs_user = ResourceArgument()
+  bin_dir = ResourceArgument(default="")
+
+  #action 'create' immediately creates all pending directories in an efficient manner
+  #action 'create_delayed' adds a directory to the list of pending directories
+  actions = Resource.actions + ["create","create_delayed"]

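The intended call pattern, visible throughout the service scripts in this
commit, is a series of create_delayed calls followed by one create with a None
name to flush the batch (directory names here are hypothetical):

    HdfsDirectory("/user/example",
                  action="create_delayed",
                  owner="example_user",
                  mode=0700)
    HdfsDirectory("/apps/example",
                  action="create_delayed",
                  owner="example_user")
    HdfsDirectory(None, action="create")  # one mkdir/chmod/chown pass for the whole queue
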
http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-common/src/main/python/resource_management/libraries/resources/hdfs_resource.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/resources/hdfs_resource.py b/ambari-common/src/main/python/resource_management/libraries/resources/hdfs_resource.py
deleted file mode 100644
index 92a043f..0000000
--- a/ambari-common/src/main/python/resource_management/libraries/resources/hdfs_resource.py
+++ /dev/null
@@ -1,76 +0,0 @@
-# !/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-Ambari Agent
-
-"""
-
-__all__ = ["HdfsResource"]
-from resource_management.core.base import Resource, ForcedListArgument, ResourceArgument, BooleanArgument
-
-"""
-Calling many hadoop commands takes too much time, because every call
-initializes a new connection to the namenode and datanodes.
-
-This resource instead gathers the directories/files to create/delete/copyFromLocal
-and then performs all of the operations with a single call.
-
-action = create_on_execute / delete_on_execute gathers information about what
-you want to create or delete.
-
-After everything is gathered, run action = execute to perform the delayed actions.
-
-The resource is a replacement for the following operations:
-  1) hadoop fs -rmr
-  2) hadoop fs -copyFromLocal
-  3) hadoop fs -put
-  4) hadoop fs -mkdir
-  5) hadoop fs -touchz
-  6) hadoop fs -chmod
-  7) hadoop fs -chown
-"""
-
-
-class HdfsResource(Resource):
-  # Required: {target, type, action}
-  # path to hadoop file/directory
-  target = ResourceArgument(default=lambda obj: obj.name)
-  # "directory" or "file"
-  type = ResourceArgument()
-  # "create_delayed" or "delete_delayed" or "execute"
-  action = ForcedListArgument()
-  # if present - copies file/directory from local path {source} to hadoop path - {target}
-  source = ResourceArgument()
-  owner = ResourceArgument()
-  group = ResourceArgument()
-  mode = ResourceArgument()
-  logoutput = ResourceArgument()
-  recursive_chown = BooleanArgument(default=False)
-  recursive_chmod = BooleanArgument(default=False)
-  change_permissions_for_parents = BooleanArgument(default=False)
-
-  security_enabled = BooleanArgument(default=False)
-  keytab = ResourceArgument()
-  kinit_path_local = ResourceArgument()
-  user = ResourceArgument()
-  hadoop_bin_dir = ResourceArgument()
-  hadoop_conf_dir = ResourceArgument()
-
-  #action 'execute' immediately creates all pending files/directories in an efficient manner
-  #action 'create_on_execute'/'delete_on_execute' adds a file/directory to the list of pending operations
-  actions = Resource.actions + ["create_on_execute", "delete_on_execute", "execute"]

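For comparison, the call pattern of the resource being removed mirrors the one
above but with explicit types and an "execute" flush (hypothetical paths):

    HdfsResource("/apps/example",
                 type="directory",
                 action="create_on_execute",
                 owner="example_user",
                 mode=0755)
    HdfsResource("/tmp/example-file",
                 type="file",
                 source="/etc/passwd",
                 action="create_on_execute")
    HdfsResource(None, action="execute")  # serialize the queue and run the jar once
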
http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-common/src/main/python/resource_management/libraries/script/script.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/script/script.py b/ambari-common/src/main/python/resource_management/libraries/script/script.py
index 5650807..9f8653b 100644
--- a/ambari-common/src/main/python/resource_management/libraries/script/script.py
+++ b/ambari-common/src/main/python/resource_management/libraries/script/script.py
@@ -297,7 +297,6 @@ class Script(object):
       return None
 
     stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
-
     if stack_version_unformatted is None or stack_version_unformatted == '':
       return None
 
@@ -326,7 +325,6 @@ class Script(object):
     :return: True if the command's stack is less than the specified version
     """
     hdp_stack_version = Script.get_hdp_stack_version()
-
     if hdp_stack_version is None:
       return False
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/accumulo.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/accumulo.py b/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/accumulo.py
index 5e476a5..e974573 100644
--- a/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/accumulo.py
+++ b/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/accumulo.py
@@ -75,13 +75,13 @@ def accumulo(name=None # 'master' or 'tserver' or 'client'
       owner=params.accumulo_user
     )
 
-  if name == "master":
+  if name in ["master","tserver"]:
     params.HdfsDirectory(format("{params.accumulo_hdfs_root_dir}"),
-                         action="create_on_execute",
+                         action="create_delayed",
                          owner=params.accumulo_user,
     )
     params.HdfsDirectory(format("{params.accumulo_hdfs_stage_dir}"),
-                         action="create_on_execute",
+                         action="create_delayed",
                          owner=params.accumulo_user,
                          mode=0751
     )

http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/accumulo_configuration.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/accumulo_configuration.py b/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/accumulo_configuration.py
index f26fdd0..88d83d3 100644
--- a/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/accumulo_configuration.py
+++ b/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/accumulo_configuration.py
@@ -160,19 +160,17 @@ def setup_conf_dir(name=None): # 'master' or 'tserver' or 'monitor' or 'gc' or '
 
   # other server setup
   if name == 'master':
-    params.HdfsResource(format("/user/{params.accumulo_user}"),
-                         type="directory",
-                         action="create_on_execute",
+    params.HdfsDirectory(format("/user/{params.accumulo_user}"),
+                         action="create_delayed",
                          owner=params.accumulo_user,
                          mode=0700
     )
-    params.HdfsResource(format("{params.parent_dir}"),
-                         type="directory",
-                         action="create_on_execute",
+    params.HdfsDirectory(format("{params.parent_dir}"),
+                         action="create_delayed",
                          owner=params.accumulo_user,
                          mode=0700
     )
-    params.HdfsResource(None, action="execute")
+    params.HdfsDirectory(None, action="create")
     if params.security_enabled and params.has_secure_user_auth:
       Execute( format("{params.kinit_cmd} "
                       "{params.daemon_script} init "
@@ -186,7 +184,6 @@ def setup_conf_dir(name=None): # 'master' or 'tserver' or 'monitor' or 'gc' or '
                                      "{params.hadoop_conf_dir} fs -stat "
                                      "{params.instance_volumes}"),
                               params.accumulo_user),
-               logoutput=True,
                user=params.accumulo_user)
     else:
       passfile = format("{params.exec_tmp_dir}/pass")
@@ -208,7 +205,6 @@ def setup_conf_dir(name=None): # 'master' or 'tserver' or 'monitor' or 'gc' or '
                                        "{params.hadoop_conf_dir} fs -stat "
                                        "{params.instance_volumes}"),
                                 params.accumulo_user),
-                 logoutput=True,
                  user=params.accumulo_user)
       finally:
         os.remove(passfile)

http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/params.py
index 5f3f5f6..3d2d36d 100644
--- a/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/params.py
@@ -16,14 +16,13 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 See the License for the specific language governing permissions and
 limitations under the License.
 
-"""HBASE/0.96.0.2.0/package/scripts/params_linux.py
+"""
 from resource_management.libraries.functions import conf_select
-from resource_management import *
 from resource_management.libraries.functions import format
 from resource_management.libraries.functions.version import format_hdp_stack_version
 from resource_management.libraries.functions.default import default
 from resource_management.libraries.script.script import Script
-
+from resource_management.libraries.resources.hdfs_directory import HdfsDirectory
 
 import status_params
 
@@ -149,14 +148,14 @@ hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
 hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
 hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
 import functools
-#create partial functions with common arguments for every HdfsResource call
-#to create/delete hdfs directory/file/copyfromlocal we need to call params.HdfsResource in code
-HdfsResource = functools.partial(
-  HdfsResource,
-  user=hdfs_user,
+#create partial functions with common arguments for every HdfsDirectory call
+#to create hdfs directory we need to call params.HdfsDirectory in code
+HdfsDirectory = functools.partial(
+  HdfsDirectory,
+  conf_dir=hadoop_conf_dir,
+  hdfs_user=hdfs_user,
   security_enabled = security_enabled,
   keytab = hdfs_user_keytab,
   kinit_path_local = kinit_path_local,
-  hadoop_bin_dir = hadoop_bin_dir,
-  hadoop_conf_dir = hadoop_conf_dir
+  bin_dir = hadoop_bin_dir
 )

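The functools.partial pattern in these params files simply pre-binds the keyword
arguments that are identical for every call, so service code only supplies what
varies. A self-contained sketch of the same mechanism (names are hypothetical):

    import functools

    def mkdir(name, conf_dir=None, hdfs_user=None, bin_dir=None):
      print "mkdir %s (conf=%s, user=%s, bin=%s)" % (name, conf_dir, hdfs_user, bin_dir)

    # bind the cluster-wide defaults once...
    Mkdir = functools.partial(mkdir,
                              conf_dir="/etc/hadoop/conf",
                              hdfs_user="hdfs",
                              bin_dir="/usr/bin")

    # ...then every call site names only the directory
    Mkdir("/apps/example")
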
http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/hbase.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/hbase.py b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/hbase.py
index 009076c..cd9f8f9 100644
--- a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/hbase.py
+++ b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/hbase.py
@@ -167,25 +167,23 @@ def hbase(name=None # 'master' or 'regionserver' or 'client'
     hbase_TemplateConfig( format("hbase_client_jaas.conf"), user=params.hbase_user)
     hbase_TemplateConfig( format("ams_zookeeper_jaas.conf"), user=params.hbase_user)
 
-  if name == "master":
+  if name in ["master","regionserver"]:
 
     if params.is_hbase_distributed:
 
-      params.HdfsResource(params.hbase_root_dir,
-                           type="directory",
-                           action="create_on_execute",
+      params.HdfsDirectory(params.hbase_root_dir,
+                           action="create_delayed",
                            owner=params.hbase_user,
                            mode=0775
       )
 
-      params.HdfsResource(params.hbase_staging_dir,
-                           type="directory",
-                           action="create_on_execute",
+      params.HdfsDirectory(params.hbase_staging_dir,
+                           action="create_delayed",
                            owner=params.hbase_user,
                            mode=0711
       )
 
-      params.HdfsResource(None, action="execute")
+      params.HdfsDirectory(None, action="create")
 
     else:
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params.py
index 4a63e3c..7e516b8 100644
--- a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params.py
@@ -185,17 +185,17 @@ hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_nam
 kinit_path_local = functions.get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
 
 import functools
-#create partial functions with common arguments for every HdfsResource call
-#to create/delete hdfs directory/file/copyfromlocal we need to call params.HdfsResource in code
-HdfsResource = functools.partial(
-  HdfsResource,
-  user=hdfs_user,
+# create partial functions with common arguments for every HdfsDirectory call
+# to create hdfs directory we need to call params.HdfsDirectory in code
+HdfsDirectory = functools.partial(
+  HdfsDirectory,
+  conf_dir=hadoop_conf_dir,
+  hdfs_user=hdfs_user,
   security_enabled = security_enabled,
   keytab = hdfs_user_keytab,
   kinit_path_local = kinit_path_local,
-  hadoop_bin_dir = hadoop_bin_dir,
-  hadoop_conf_dir = hadoop_conf_dir
- )
+  bin_dir = hadoop_bin_dir
+)
 
 
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon.py b/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon.py
index 86318f3..ed9098c 100644
--- a/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon.py
+++ b/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon.py
@@ -91,36 +91,33 @@ def falcon(type, action = None):
   if type == 'server':
     if action == 'config':
       if params.store_uri[0:4] == "hdfs":
-        params.HdfsResource(params.store_uri,
-                             type="directory",
-                             action="create_on_execute",
+        params.HdfsDirectory(params.store_uri,
+                             action="create_delayed",
                              owner=params.falcon_user,
                              mode=0755
         )
-      elif params.store_uri[0:4] == "file":
+      if params.store_uri[0:4] == "file":
         Directory(params.store_uri[7:],
                   owner=params.falcon_user,
                   recursive=True
         )
-      params.HdfsResource(params.flacon_apps_dir,
-                           type="directory",
-                           action="create_on_execute",
+      params.HdfsDirectory(params.flacon_apps_dir,
+                           action="create_delayed",
                            owner=params.falcon_user,
-                           mode=0777 #TODO change to proper mode
+                           mode=0777 #TODO change to proper mode
       )
       if params.falcon_store_uri[0:4] == "hdfs":
-        params.HdfsResource(params.falcon_store_uri,
-                             type="directory",
-                             action="create_on_execute",
+        params.HdfsDirectory(params.falcon_store_uri,
+                             action="create_delayed",
                              owner=params.falcon_user,
                              mode=0755
         )
-      elif params.falcon_store_uri[0:4] == "file":
+      if params.falcon_store_uri[0:4] == "file":
         Directory(params.falcon_store_uri[7:],
                   owner=params.falcon_user,
                   recursive=True
         )
-      params.HdfsResource(None, action="execute")
+      params.HdfsDirectory(None, action="create")
       Directory(params.falcon_local_dir,
                 owner=params.falcon_user,
                 recursive=True,

http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/params_linux.py
index 421a981..27ced1d 100644
--- a/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/params_linux.py
@@ -18,13 +18,12 @@ limitations under the License.
 """
 import status_params
 
-from resource_management import *
 from resource_management.libraries.functions import format
 from resource_management.libraries.functions.version import format_hdp_stack_version
 from resource_management.libraries.functions.default import default
 from resource_management.libraries.functions import get_kinit_path
 from resource_management.libraries.script.script import Script
-
+from resource_management.libraries.resources.hdfs_directory import HdfsDirectory
 
 config = Script.get_config()
 
@@ -101,15 +100,14 @@ hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_nam
 smokeuser_principal =  config['configurations']['cluster-env']['smokeuser_principal_name']
 kinit_path_local = get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
 import functools
-#create partial functions with common arguments for every HdfsResource call
-#to create/delete hdfs directory/file/copyfromlocal we need to call params.HdfsResource in code
-HdfsResource = functools.partial(
-  HdfsResource,
-  user=hdfs_user,
+#create partial functions with common arguments for every HdfsDirectory call
+#to create hdfs directory we need to call params.HdfsDirectory in code
+HdfsDirectory = functools.partial(
+  HdfsDirectory,
+  conf_dir=hadoop_conf_dir,
+  hdfs_user=hdfs_user,
   security_enabled = security_enabled,
   keytab = hdfs_user_keytab,
   kinit_path_local = kinit_path_local,
-  hadoop_bin_dir = hadoop_bin_dir,
-  hadoop_conf_dir = hadoop_conf_dir
- )
-
+  bin_dir = hadoop_bin_dir
+)

http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase.py b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase.py
index 1bfa7e4..9a35a24 100644
--- a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase.py
+++ b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase.py
@@ -163,19 +163,17 @@ def hbase(name=None):
       group=params.user_group,
       owner=params.hbase_user
     )
-  if name == "master":
-    params.HdfsResource(params.hbase_hdfs_root_dir,
-                         type="directory",
-                         action="create_on_execute",
+  if name in ["master","regionserver"]:
+    params.HdfsDirectory(params.hbase_hdfs_root_dir,
+                         action="create_delayed",
                          owner=params.hbase_user
     )
-    params.HdfsResource(params.hbase_staging_dir,
-                         type="directory",
-                         action="create_on_execute",
+    params.HdfsDirectory(params.hbase_staging_dir,
+                         action="create_delayed",
                          owner=params.hbase_user,
                          mode=0711
     )
-    params.HdfsResource(None, action="execute")
+    params.HdfsDirectory(None, action="create")
 
 def hbase_TemplateConfig(name, tag=None):
   import params

http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py
index 5e9bf7e..aec8e23 100644
--- a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py
@@ -24,7 +24,6 @@ from functions import calc_xmn_from_xms
 
 from ambari_commons.constants import AMBARI_SUDO_BINARY
 
-from resource_management import *
 from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import format
 from resource_management.libraries.functions.version import format_hdp_stack_version
@@ -32,7 +31,7 @@ from resource_management.libraries.functions.default import default
 from resource_management.libraries.functions import get_kinit_path
 from resource_management.libraries.functions import get_unique_id_and_date
 from resource_management.libraries.script.script import Script
-
+from resource_management.libraries.resources.hdfs_directory import HdfsDirectory
 from resource_management.libraries.functions.substitute_vars import substitute_vars
 
 # server configurations
@@ -141,9 +140,7 @@ if security_enabled:
   _hostname_lowercase = config['hostname'].lower()
   master_jaas_princ = config['configurations']['hbase-site']['hbase.master.kerberos.principal'].replace('_HOST',_hostname_lowercase)
   regionserver_jaas_princ = config['configurations']['hbase-site']['hbase.regionserver.kerberos.principal'].replace('_HOST',_hostname_lowercase)
-  _queryserver_jaas_princ = config['configurations']['hbase-site']['phoenix.queryserver.kerberos.principal']
-  if not is_empty(_queryserver_jaas_princ):
-    queryserver_jaas_princ =_queryserver_jaas_princ.replace('_HOST',_hostname_lowercase)
+  queryserver_jaas_princ = config['configurations']['hbase-site']['phoenix.queryserver.kerberos.principal'].replace('_HOST',_hostname_lowercase)
 
 master_keytab_path = config['configurations']['hbase-site']['hbase.master.keytab.file']
 regionserver_keytab_path = config['configurations']['hbase-site']['hbase.regionserver.keytab.file']
@@ -172,16 +169,16 @@ hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
 hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
 hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
 import functools
-#create partial functions with common arguments for every HdfsResource call
-#to create/delete hdfs directory/file/copyfromlocal we need to call params.HdfsResource in code
-HdfsResource = functools.partial(
-  HdfsResource,
-  user=hdfs_user,
+#create partial functions with common arguments for every HdfsDirectory call
+#to create hdfs directory we need to call params.HdfsDirectory in code
+HdfsDirectory = functools.partial(
+  HdfsDirectory,
+  conf_dir=hadoop_conf_dir,
+  hdfs_user=hdfs_user,
   security_enabled = security_enabled,
   keytab = hdfs_user_keytab,
   kinit_path_local = kinit_path_local,
-  hadoop_bin_dir = hadoop_bin_dir,
-  hadoop_conf_dir = hadoop_conf_dir
+  bin_dir = hadoop_bin_dir
 )
 
 # ranger host

http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/service_check.py b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/service_check.py
index b774f19..a60ebad 100644
--- a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/service_check.py
+++ b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/service_check.py
@@ -46,6 +46,7 @@ class HbaseServiceCheckDefault(HbaseServiceCheck):
     env.set_params(params)
     
     output_file = "/apps/hbase/data/ambarismoketest"
+    test_cmd = format("fs -test -e {output_file}")
     smokeuser_kinit_cmd = format("{kinit_path_local} -kt {smoke_user_keytab} {smokeuser_principal};") if params.security_enabled else ""
     hbase_servicecheck_file = format("{exec_tmp_dir}/hbase-smoke.sh")
   

http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/files/fast-hdfs-resource.jar
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/files/fast-hdfs-resource.jar b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/files/fast-hdfs-resource.jar
deleted file mode 100644
index defde28..0000000
Binary files a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/files/fast-hdfs-resource.jar and /dev/null differ

http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs.py
index 95fe90c..a615c27 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs.py
@@ -98,11 +98,6 @@ def hdfs(name=None):
        owner=tc_owner,
        content=Template("slaves.j2")
   )
-
-  # for source-code of jar goto contrib/fast-hdfs-resource
-  File(format("{ambari_libs_dir}/fast-hdfs-resource.jar"),
-       content=StaticFile("fast-hdfs-resource.jar")
-  )
   
   if params.lzo_enabled and len(params.lzo_packages) > 0:
       Package(params.lzo_packages)

http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_namenode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_namenode.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_namenode.py
index 453d824..e36019e 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_namenode.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_namenode.py
@@ -168,21 +168,18 @@ def create_name_dirs(directories):
 def create_hdfs_directories(check):
   import params
 
-  params.HdfsResource("/tmp",
-                       type="directory",
-                       action="create_on_execute",
+  params.HdfsDirectory("/tmp",
+                       action="create_delayed",
                        owner=params.hdfs_user,
                        mode=0777
   )
-  params.HdfsResource(params.smoke_hdfs_user_dir,
-                       type="directory",
-                       action="create_on_execute",
+  params.HdfsDirectory(params.smoke_hdfs_user_dir,
+                       action="create_delayed",
                        owner=params.smoke_user,
                        mode=params.smoke_hdfs_user_mode
   )
-  params.HdfsResource(None, 
-                      action="execute",
-                      only_if=check #skip creation when HA not active
+  params.HdfsDirectory(None, action="create",
+                       only_if=check #skip creation when HA not active
   )
 
 def format_namenode(force=None):

http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_nfsgateway.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_nfsgateway.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_nfsgateway.py
index ac0e24d..ad5ef89 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_nfsgateway.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_nfsgateway.py
@@ -22,6 +22,7 @@ from resource_management.core.logger import Logger
 from resource_management.core.resources import Directory
 from resource_management.core import shell
 from utils import service
+from utils import hdfs_directory
 import subprocess,os
 
 # NFS GATEWAY is always started by root using jsvc due to rpcbind bugs

http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_snamenode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_snamenode.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_snamenode.py
index 78ef977..3feff67 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_snamenode.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_snamenode.py
@@ -19,6 +19,7 @@ limitations under the License.
 
 from resource_management import *
 from utils import service
+from utils import hdfs_directory
 from ambari_commons.os_family_impl import OsFamilyImpl, OsFamilyFuncImpl
 from ambari_commons import OSConst
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py
index 39e4b97..82a6351 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py
@@ -32,8 +32,7 @@ from resource_management.libraries.functions.default import default
 from resource_management.libraries.functions import get_klist_path
 from resource_management.libraries.functions import get_kinit_path
 from resource_management.libraries.script.script import Script
-from resource_management.libraries.resources.hdfs_resource import HdfsResource
-
+from resource_management.libraries.resources.hdfs_directory import HdfsDirectory
 from resource_management.libraries.functions.format_jvm_option import format_jvm_option
 from resource_management.libraries.functions.get_lzo_packages import get_lzo_packages
 
@@ -97,7 +96,7 @@ if Script.is_hdp_stack_greater_or_equal("2.2"):
       hadoop_secure_dn_user = '""'
 
 
-ambari_libs_dir = "/var/lib/ambari-agent/lib"
+
 limits_conf_dir = "/etc/security/limits.d"
 
 if Script.is_hdp_stack_greater_or_equal("2.0") and Script.is_hdp_stack_less_than("2.1") and not OSCheck.is_suse_family():
@@ -285,19 +284,18 @@ else:
   jn_kinit_cmd = ""
 
 import functools
-#create partial functions with common arguments for every HdfsResource call
-#to create/delete/copyfromlocal hdfs directories/files we need to call params.HdfsResource in code
-HdfsResource = functools.partial(
-  HdfsResource,
-  user=hdfs_user,
+#create partial functions with common arguments for every HdfsDirectory call
+#to create hdfs directory we need to call params.HdfsDirectory in code
+HdfsDirectory = functools.partial(
+  HdfsDirectory,
+  conf_dir=hadoop_conf_dir,
+  hdfs_user=hdfs_user,
   security_enabled = security_enabled,
   keytab = hdfs_user_keytab,
   kinit_path_local = kinit_path_local,
-  hadoop_bin_dir = hadoop_bin_dir,
-  hadoop_conf_dir = hadoop_conf_dir
+  bin_dir = hadoop_bin_dir
 )
 
-
 # The logic for LZO also exists in OOZIE's params.py
 io_compression_codecs = default("/configurations/core-site/io.compression.codecs", None)
 lzo_enabled = io_compression_codecs is not None and "com.hadoop.compression.lzo" in io_compression_codecs.lower()

http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/service_check.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/service_check.py
index 7a1e6b7..dd319b0 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/service_check.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/service_check.py
@@ -37,6 +37,15 @@ class HdfsServiceCheckDefault(HdfsServiceCheck):
 
     safemode_command = format("dfsadmin -fs {namenode_address} -safemode get | grep OFF")
 
+    create_dir_cmd = format("fs -mkdir {dir}")
+    chmod_command = format("fs -chmod 777 {dir}")
+    test_dir_exists = as_user(format("{hadoop_bin_dir}/hadoop --config {hadoop_conf_dir} fs -test -e {dir}"), params.hdfs_user)
+    cleanup_cmd = format("fs -rm {tmp_file}")
+    #cleanup is put below to handle retries; if retrying, there will be a stale file
+    #that needs cleanup; the exit code is a function of the second command
+    create_file_cmd = format(
+      "{cleanup_cmd}; hadoop --config {hadoop_conf_dir} fs -put /etc/passwd {tmp_file}")
+    test_cmd = format("fs -test -e {tmp_file}")
     if params.security_enabled:
       Execute(format("{kinit_path_local} -kt {hdfs_user_keytab} {hdfs_principal_name}"),
         user=params.hdfs_user
@@ -49,23 +58,39 @@ class HdfsServiceCheckDefault(HdfsServiceCheck):
                   tries=20,
                   bin_dir=params.hadoop_bin_dir
     )
-    params.HdfsResource(dir,
-                        type="directory",
-                        action="create_on_execute",
-                        mode=0777
+    ExecuteHadoop(create_dir_cmd,
+                  user=params.hdfs_user,
+                  logoutput=True,
+                  not_if=test_dir_exists,
+                  conf_dir=params.hadoop_conf_dir,
+                  try_sleep=3,
+                  tries=5,
+                  bin_dir=params.hadoop_bin_dir
     )
-    params.HdfsResource(tmp_file,
-                        type="file",
-                        action="delete_on_execute",
+    ExecuteHadoop(chmod_command,
+                  user=params.hdfs_user,
+                  logoutput=True,
+                  conf_dir=params.hadoop_conf_dir,
+                  try_sleep=3,
+                  tries=5,
+                  bin_dir=params.hadoop_bin_dir
     )
-
-    params.HdfsResource(tmp_file,
-                        type="file",
-                        source="/etc/passwd",
-                        action="create_on_execute"
+    ExecuteHadoop(create_file_cmd,
+                  user=params.hdfs_user,
+                  logoutput=True,
+                  conf_dir=params.hadoop_conf_dir,
+                  try_sleep=3,
+                  tries=5,
+                  bin_dir=params.hadoop_bin_dir
+    )
+    ExecuteHadoop(test_cmd,
+                  user=params.hdfs_user,
+                  logoutput=True,
+                  conf_dir=params.hadoop_conf_dir,
+                  try_sleep=3,
+                  tries=5,
+                  bin_dir=params.hadoop_bin_dir
     )
-    params.HdfsResource(None, action="execute")
-
     if params.has_journalnode_hosts:
       journalnode_port = params.journalnode_port
       checkWebUIFileName = "checkWebUI.py"

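The cleanup-then-put compound command in the service check works because the
shell's ';' separator makes the compound's exit status that of the last
command, so a failed rm of a not-yet-existing file on the first attempt does
not fail the check. A small sketch of the same retry-friendly idea (the file
name is hypothetical; tries/try_sleep are the same knobs ExecuteHadoop uses
above):

    Execute("hadoop fs -rm /tmp/example; hadoop fs -put /etc/passwd /tmp/example",
            user="hdfs",
            tries=5,
            try_sleep=3)
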
http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/files/templetonSmoke.sh
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/files/templetonSmoke.sh b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/files/templetonSmoke.sh
index 2083312..d16848d 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/files/templetonSmoke.sh
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/files/templetonSmoke.sh
@@ -23,11 +23,10 @@
 export ttonhost=$1
 export smoke_test_user=$2
 export templeton_port=$3
-export ttonTestScript=$4
-export smoke_user_keytab=$5
-export security_enabled=$6
-export kinit_path_local=$7
-export smokeuser_principal=$8
+export smoke_user_keytab=$4
+export security_enabled=$5
+export kinit_path_local=$6
+export smokeuser_principal=$7
 export ttonurl="http://${ttonhost}:${templeton_port}/templeton/v1"
 
 if [[ $security_enabled == "true" ]]; then
@@ -68,8 +67,21 @@ if [[ $security_enabled == "true" ]]; then
 fi
 
 #try pig query
+outname=${smoke_test_user}.`date +"%M%d%y"`.$$;
+ttonTestOutput="/tmp/idtest.${outname}.out";
+ttonTestInput="/tmp/idtest.${outname}.in";
 ttonTestScript="idtest.${outname}.pig"
 
+echo "A = load '$ttonTestInput' using PigStorage(':');"  > /tmp/$ttonTestScript
+echo "B = foreach A generate \$0 as id; " >> /tmp/$ttonTestScript
+echo "store B into '$ttonTestOutput';" >> /tmp/$ttonTestScript
+
+#copy pig script to hdfs
+/var/lib/ambari-agent/ambari-sudo.sh su ${smoke_test_user} -s /bin/bash - -c "hadoop dfs -copyFromLocal /tmp/$ttonTestScript /tmp/$ttonTestScript"
+
+#copy input file to hdfs
+/var/lib/ambari-agent/ambari-sudo.sh su ${smoke_test_user} -s /bin/bash - -c "hadoop dfs -copyFromLocal /etc/passwd $ttonTestInput"
+
 #create, copy post args file
 echo -n "user.name=${smoke_test_user}&file=/tmp/$ttonTestScript" > /tmp/pig_post.txt