Posted to commits@ambari.apache.org by jo...@apache.org on 2017/09/29 15:17:55 UTC

[1/3] ambari git commit: AMBARI-22083 - Wrong Hadoop Home Directory Is Being Picked Up on MAINT/PATCH Upgraded Clusters (jonathanhurley)

Repository: ambari
Updated Branches:
  refs/heads/branch-2.6 6de11b81b -> 5433e4792


http://git-wip-us.apache.org/repos/asf/ambari/blob/5433e479/ambari-server/src/test/python/stacks/2.3/SPARK/test_spark_thrift_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.3/SPARK/test_spark_thrift_server.py b/ambari-server/src/test/python/stacks/2.3/SPARK/test_spark_thrift_server.py
index f0a89a9..fbe5403 100644
--- a/ambari-server/src/test/python/stacks/2.3/SPARK/test_spark_thrift_server.py
+++ b/ambari-server/src/test/python/stacks/2.3/SPARK/test_spark_thrift_server.py
@@ -96,7 +96,7 @@ class TestSparkThriftServer(RMFTestCase):
     self.assertResourceCalled('HdfsResource', '/user/spark',
         immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
         security_enabled = False,
-        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+        hadoop_bin_dir = '/usr/hdp/2.3.2.0-2067/hadoop/bin',
         keytab = UnknownConfigurationMock(),
         default_fs = 'hdfs://c6401.ambari.apache.org:8020',
         hdfs_site = {u'a': u'b'},
@@ -104,7 +104,7 @@ class TestSparkThriftServer(RMFTestCase):
         principal_name = UnknownConfigurationMock(),
         user = 'hdfs',
         owner = 'spark',
-        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+        hadoop_conf_dir = '/usr/hdp/2.3.2.0-2067/hadoop/conf',
         type = 'directory',
         action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
         dfs_type = '',
@@ -113,7 +113,7 @@ class TestSparkThriftServer(RMFTestCase):
     self.assertResourceCalled('HdfsResource', None,
         immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
         security_enabled = False,
-        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+        hadoop_bin_dir = '/usr/hdp/2.3.2.0-2067/hadoop/bin',
         keytab = UnknownConfigurationMock(),
         default_fs = 'hdfs://c6401.ambari.apache.org:8020',
         hdfs_site = {u'a': u'b'},
@@ -122,7 +122,7 @@ class TestSparkThriftServer(RMFTestCase):
         user = 'hdfs',
         action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
         dfs_type = '',
-        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+        hadoop_conf_dir = '/usr/hdp/2.3.2.0-2067/hadoop/conf',
     )
     self.assertResourceCalled('PropertiesFile', '/usr/hdp/current/spark-client/conf/spark-defaults.conf',
         owner = 'spark',

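The expectation changes in this file (and in the sibling test files below) all encode the same new behavior: HdfsResource now receives version-qualified hadoop directories instead of the /usr/hdp/current/hadoop-client symlink. A minimal sketch of the resolution these assertions exercise, assuming an existence check like the sudo.path_isdir call added to stack_select.py further down (the helper name here is illustrative, not part of the patch):

    import os

    def expected_hadoop_bin_dir(stack_root, version, path_isdir):
        # Prefer <stack-root>/<version>/hadoop/bin when the command carries a
        # repository version and that directory actually exists on disk.
        if version is not None:
            versioned = os.path.join(stack_root, version, "hadoop", "bin")
            if path_isdir(versioned):
                return versioned
        # Otherwise fall back to the "current" symlink.
        return os.path.join(stack_root, "current", "hadoop-client", "bin")

    # With the fixture values above: stack_root="/usr/hdp" and
    # version="2.3.2.0-2067" give "/usr/hdp/2.3.2.0-2067/hadoop/bin".
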
http://git-wip-us.apache.org/repos/asf/ambari/blob/5433e479/ambari-server/src/test/python/stacks/2.5/RANGER_KMS/test_kms_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.5/RANGER_KMS/test_kms_server.py b/ambari-server/src/test/python/stacks/2.5/RANGER_KMS/test_kms_server.py
index 7ebe2f5..34ca102 100644
--- a/ambari-server/src/test/python/stacks/2.5/RANGER_KMS/test_kms_server.py
+++ b/ambari-server/src/test/python/stacks/2.5/RANGER_KMS/test_kms_server.py
@@ -171,8 +171,8 @@ class TestRangerKMS(RMFTestCase):
                         security_enabled = False,
                         keytab = None,
                         kinit_path_local = '/usr/bin/kinit',
-                        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
-                        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+                        hadoop_bin_dir = '/usr/hdp/2.5.0.0-777/hadoop/bin',
+                        hadoop_conf_dir = '/usr/hdp/2.5.0.0-777/hadoop/conf',
                         principal_name = None,
                         hdfs_site = self.getConfig()['configurations']['hdfs-site'],
                         default_fs = 'hdfs://c6401.ambari.apache.org:8020'
@@ -189,8 +189,8 @@ class TestRangerKMS(RMFTestCase):
                         security_enabled = False,
                         keytab = None,
                         kinit_path_local = '/usr/bin/kinit',
-                        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
-                        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+                        hadoop_bin_dir = '/usr/hdp/2.5.0.0-777/hadoop/bin',
+                        hadoop_conf_dir = '/usr/hdp/2.5.0.0-777/hadoop/conf',
                         principal_name = None,
                         hdfs_site = self.getConfig()['configurations']['hdfs-site'],
                         default_fs = 'hdfs://c6401.ambari.apache.org:8020'
@@ -202,8 +202,8 @@ class TestRangerKMS(RMFTestCase):
                         security_enabled = False,
                         keytab = None,
                         kinit_path_local = '/usr/bin/kinit',
-                        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
-                        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+                        hadoop_bin_dir = '/usr/hdp/2.5.0.0-777/hadoop/bin',
+                        hadoop_conf_dir = '/usr/hdp/2.5.0.0-777/hadoop/conf',
                         principal_name = None,
                         hdfs_site = self.getConfig()['configurations']['hdfs-site'],
                         default_fs = 'hdfs://c6401.ambari.apache.org:8020'
@@ -611,8 +611,8 @@ class TestRangerKMS(RMFTestCase):
                         security_enabled = True,
                         keytab = '/etc/security/keytabs/hdfs.headless.keytab',
                         kinit_path_local = '/usr/bin/kinit',
-                        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
-                        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+                        hadoop_bin_dir = '/usr/hdp/2.5.0.0-777/hadoop/bin',
+                        hadoop_conf_dir = '/usr/hdp/2.5.0.0-777/hadoop/conf',
                         principal_name = 'hdfs-cl1@EXAMPLE.COM',
                         hdfs_site = self.getConfig()['configurations']['hdfs-site'],
                         default_fs = 'hdfs://c6401.ambari.apache.org:8020'
@@ -629,8 +629,8 @@ class TestRangerKMS(RMFTestCase):
                         security_enabled = True,
                         keytab = '/etc/security/keytabs/hdfs.headless.keytab',
                         kinit_path_local = '/usr/bin/kinit',
-                        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
-                        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+                        hadoop_bin_dir = '/usr/hdp/2.5.0.0-777/hadoop/bin',
+                        hadoop_conf_dir = '/usr/hdp/2.5.0.0-777/hadoop/conf',
                         principal_name = 'hdfs-cl1@EXAMPLE.COM',
                         hdfs_site = self.getConfig()['configurations']['hdfs-site'],
                         default_fs = 'hdfs://c6401.ambari.apache.org:8020'
@@ -642,8 +642,8 @@ class TestRangerKMS(RMFTestCase):
                         security_enabled = True,
                         keytab = '/etc/security/keytabs/hdfs.headless.keytab',
                         kinit_path_local = '/usr/bin/kinit',
-                        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
-                        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+                        hadoop_bin_dir = '/usr/hdp/2.5.0.0-777/hadoop/bin',
+                        hadoop_conf_dir = '/usr/hdp/2.5.0.0-777/hadoop/conf',
                         principal_name = 'hdfs-cl1@EXAMPLE.COM',
                         hdfs_site = self.getConfig()['configurations']['hdfs-site'],
                         default_fs = 'hdfs://c6401.ambari.apache.org:8020'

http://git-wip-us.apache.org/repos/asf/ambari/blob/5433e479/ambari-server/src/test/python/stacks/2.5/SPARK/test_spark_livy.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.5/SPARK/test_spark_livy.py b/ambari-server/src/test/python/stacks/2.5/SPARK/test_spark_livy.py
index 3c7e4a2..4f1fb17 100644
--- a/ambari-server/src/test/python/stacks/2.5/SPARK/test_spark_livy.py
+++ b/ambari-server/src/test/python/stacks/2.5/SPARK/test_spark_livy.py
@@ -56,7 +56,7 @@ class TestSparkClient(RMFTestCase):
         self.assertResourceCalled('HdfsResource', '/user/livy',
                                   immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
                                   security_enabled = False,
-                                  hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+                                  hadoop_bin_dir = '/usr/hdp/2.5.0.0-1235/hadoop/bin',
                                   keytab = UnknownConfigurationMock(),
                                   default_fs = 'hdfs://c6401.ambari.apache.org:8020',
                                   hdfs_site = {u'a': u'b'},
@@ -64,7 +64,7 @@ class TestSparkClient(RMFTestCase):
                                   principal_name = UnknownConfigurationMock(),
                                   user = 'hdfs',
                                   owner = 'livy',
-                                  hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+                                  hadoop_conf_dir = '/usr/hdp/2.5.0.0-1235/hadoop/conf',
                                   type = 'directory',
                                   action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
                                   dfs_type = '',
@@ -73,7 +73,7 @@ class TestSparkClient(RMFTestCase):
         self.assertResourceCalled('HdfsResource', None,
                                   immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
                                   security_enabled = False,
-                                  hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+                                  hadoop_bin_dir = '/usr/hdp/2.5.0.0-1235/hadoop/bin',
                                   keytab = UnknownConfigurationMock(),
                                   default_fs = 'hdfs://c6401.ambari.apache.org:8020',
                                   hdfs_site = {u'a': u'b'},
@@ -82,12 +82,12 @@ class TestSparkClient(RMFTestCase):
                                   user = 'hdfs',
                                   action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
                                   dfs_type = '',
-                                  hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+                                  hadoop_conf_dir = '/usr/hdp/2.5.0.0-1235/hadoop/conf',
                                   )
         self.assertResourceCalled('HdfsResource', '/livy-recovery',
                                   immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
                                   security_enabled = False,
-                                  hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+                                  hadoop_bin_dir = '/usr/hdp/2.5.0.0-1235/hadoop/bin',
                                   keytab = UnknownConfigurationMock(),
                                   default_fs = 'hdfs://c6401.ambari.apache.org:8020',
                                   hdfs_site = {u'a': u'b'},
@@ -95,7 +95,7 @@ class TestSparkClient(RMFTestCase):
                                   principal_name = UnknownConfigurationMock(),
                                   user = 'hdfs',
                                   owner = 'livy',
-                                  hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+                                  hadoop_conf_dir = '/usr/hdp/2.5.0.0-1235/hadoop/conf',
                                   type = 'directory',
                                   action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
                                   dfs_type = '',
@@ -104,7 +104,7 @@ class TestSparkClient(RMFTestCase):
         self.assertResourceCalled('HdfsResource', None,
                                   immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
                                   security_enabled = False,
-                                  hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+                                  hadoop_bin_dir = '/usr/hdp/2.5.0.0-1235/hadoop/bin',
                                   keytab = UnknownConfigurationMock(),
                                   default_fs = 'hdfs://c6401.ambari.apache.org:8020',
                                   hdfs_site = {u'a': u'b'},
@@ -113,7 +113,7 @@ class TestSparkClient(RMFTestCase):
                                   user = 'hdfs',
                                   action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
                                   dfs_type = '',
-                                  hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+                                  hadoop_conf_dir = '/usr/hdp/2.5.0.0-1235/hadoop/conf',
                                   )
         self.assertResourceCalled('File', '/usr/hdp/current/livy-server/conf/livy-env.sh',
                                   content = InlineTemplate(self.getConfig()['configurations']['livy-env']['content']),

http://git-wip-us.apache.org/repos/asf/ambari/blob/5433e479/ambari-server/src/test/python/stacks/2.5/configs/ranger-kms-secured.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.5/configs/ranger-kms-secured.json b/ambari-server/src/test/python/stacks/2.5/configs/ranger-kms-secured.json
index 103c86b..f7f054a 100644
--- a/ambari-server/src/test/python/stacks/2.5/configs/ranger-kms-secured.json
+++ b/ambari-server/src/test/python/stacks/2.5/configs/ranger-kms-secured.json
@@ -214,7 +214,7 @@
         "not_managed_hdfs_path_list": "[\"/tmp\"]",
         "ambari_db_rca_url": "jdbc:postgresql://c6401.ambari.apache.org/ambarirca",
         "java_version": "8",
-        "repo_info": "[{\"baseUrl\":\"http://s3.amazonaws.com/dev.hortonworks.com/HDP/centos6/2.x/BUILDS/2.5.0.0-801\",\"osType\":\"redhat6\",\"repoId\":\"HDP-2.5\",\"repoName\":\"HDP\",\"defaultBaseUrl\":\"http://s3.amazonaws.com/dev.hortonworks.com/HDP/centos6/2.x/updates/2.5.0.0\",\"latestBaseUrl\":\"http://s3.amazonaws.com/dev.hortonworks.com/HDP/centos6/2.x/BUILDS/2.5.0.0-801\",\"baseSaved\":true},{\"baseUrl\":\"http://s3.amazonaws.com/dev.hortonworks.com/HDP-UTILS-1.1.0.21/repos/centos6\",\"osType\":\"redhat6\",\"repoId\":\"HDP-UTILS-1.1.0.21\",\"repoName\":\"HDP-UTILS\",\"defaultBaseUrl\":\"http://public-repo-1.hortonworks.com/HDP-UTILS-1.1.0.21/repos/centos6\",\"latestBaseUrl\":\"http://public-repo-1.hortonworks.com/HDP-UTILS-1.1.0.21/repos/centos6\",\"baseSaved\":true}]",
+        "repo_info": "[{\"baseUrl\":\"http://s3.amazonaws.com/dev.hortonworks.com/HDP/centos6/2.x/BUILDS/2.5.0.0-777\",\"osType\":\"redhat6\",\"repoId\":\"HDP-2.5\",\"repoName\":\"HDP\",\"defaultBaseUrl\":\"http://s3.amazonaws.com/dev.hortonworks.com/HDP/centos6/2.x/updates/2.5.0.0\",\"latestBaseUrl\":\"http://s3.amazonaws.com/dev.hortonworks.com/HDP/centos6/2.x/BUILDS/2.5.0.0-777\",\"baseSaved\":true},{\"baseUrl\":\"http://s3.amazonaws.com/dev.hortonworks.com/HDP-UTILS-1.1.0.21/repos/centos6\",\"osType\":\"redhat6\",\"repoId\":\"HDP-UTILS-1.1.0.21\",\"repoName\":\"HDP-UTILS\",\"defaultBaseUrl\":\"http://public-repo-1.hortonworks.com/HDP-UTILS-1.1.0.21/repos/centos6\",\"latestBaseUrl\":\"http://public-repo-1.hortonworks.com/HDP-UTILS-1.1.0.21/repos/centos6\",\"baseSaved\":true}]",
         "package_list": "[{\"name\":\"ranger_${stack_version}-kms\",\"condition\":\"\",\"skipUpgrade\":false}]",
         "db_name": "ambari",
         "group_list": "[\"kms\",\"ranger\",\"hadoop\",\"users\"]",
@@ -230,7 +230,7 @@
         "service_package_folder": "common-services/RANGER_KMS/0.5.0.2.3/package",
         "script": "scripts/kms_server.py",
         "hooks_folder": "HDP/2.0.6/hooks",
-        "version": "2.5.0.0-801",
+        "version": "2.5.0.0-777",
         "max_duration_for_retries": "0",
         "command_retry_enabled": "false",
         "command_timeout": "600",
@@ -605,7 +605,7 @@
             "dfs.web.authentication.kerberos.keytab": "/etc/security/keytabs/spnego.service.keytab",
             "fs.permissions.umask-mode": "022",
             "dfs.namenode.stale.datanode.interval": "30000",
-            "dfs.datanode.ipc.address": "0.0.0.0:8010",
+            "dfs.datanode.ipc.address": "0.0.0.0:7770",
             "dfs.datanode.failed.volumes.tolerated": "0",
             "dfs.datanode.data.dir": "/grid/0/hadoop/hdfs/data",
             "dfs.namenode.http-address": "c6401.ambari.apache.org:50070",

http://git-wip-us.apache.org/repos/asf/ambari/blob/5433e479/ambari-server/src/test/python/stacks/2.6/DRUID/test_druid.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.6/DRUID/test_druid.py b/ambari-server/src/test/python/stacks/2.6/DRUID/test_druid.py
index 28e67e9..533ac71 100644
--- a/ambari-server/src/test/python/stacks/2.6/DRUID/test_druid.py
+++ b/ambari-server/src/test/python/stacks/2.6/DRUID/test_druid.py
@@ -589,7 +589,7 @@ class TestDruid(RMFTestCase):
     self.assertResourceCalled('HdfsResource', '/user/druid',
                               immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
                               security_enabled = False,
-                              hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+                              hadoop_bin_dir = '/usr/hdp/2.5.0.0-1235/hadoop/bin',
                               keytab = UnknownConfigurationMock(),
                               default_fs = 'hdfs://c6401.ambari.apache.org:8020',
                               hdfs_site = {u'a': u'b'},
@@ -598,7 +598,7 @@ class TestDruid(RMFTestCase):
                               user = 'hdfs',
                               owner = 'druid',
                               group='hadoop',
-                              hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+                              hadoop_conf_dir = '/usr/hdp/2.5.0.0-1235/hadoop/conf',
                               type = 'directory',
                               action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
                               dfs_type = '',
@@ -609,7 +609,7 @@ class TestDruid(RMFTestCase):
     self.assertResourceCalled('HdfsResource', '/user/druid/data',
                               immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
                               security_enabled = False,
-                              hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+                              hadoop_bin_dir = '/usr/hdp/2.5.0.0-1235/hadoop/bin',
                               keytab = UnknownConfigurationMock(),
                               default_fs = 'hdfs://c6401.ambari.apache.org:8020',
                               hdfs_site = {u'a': u'b'},
@@ -617,7 +617,7 @@ class TestDruid(RMFTestCase):
                               principal_name = 'missing_principal',
                               user = 'hdfs',
                               owner = 'druid',
-                              hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+                              hadoop_conf_dir = '/usr/hdp/2.5.0.0-1235/hadoop/conf',
                               type = 'directory',
                               action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
                               dfs_type = '',
@@ -626,7 +626,7 @@ class TestDruid(RMFTestCase):
                               )
     self.assertResourceCalled('HdfsResource', '/tmp',
         security_enabled = False,
-        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+        hadoop_bin_dir = '/usr/hdp/2.5.0.0-1235/hadoop/bin',
         keytab = UnknownConfigurationMock(),
         dfs_type = '',
         default_fs = 'hdfs://c6401.ambari.apache.org:8020',
@@ -636,7 +636,7 @@ class TestDruid(RMFTestCase):
         principal_name = 'missing_principal',
         user = 'hdfs',
         owner = 'hdfs',
-        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+        hadoop_conf_dir = '/usr/hdp/2.5.0.0-1235/hadoop/conf',
         type = 'directory',
         action = ['create_on_execute'],
         immutable_paths = [u'/apps/hive/warehouse',
@@ -649,7 +649,7 @@ class TestDruid(RMFTestCase):
     self.assertResourceCalled('HdfsResource', '/tmp/druid-indexing',
                               immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
                               security_enabled = False,
-                              hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+                              hadoop_bin_dir = '/usr/hdp/2.5.0.0-1235/hadoop/bin',
                               keytab = UnknownConfigurationMock(),
                               default_fs = 'hdfs://c6401.ambari.apache.org:8020',
                               hdfs_site = {u'a': u'b'},
@@ -658,7 +658,7 @@ class TestDruid(RMFTestCase):
                               user = 'hdfs',
                               owner = 'druid',
                               group='hadoop',
-                              hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+                              hadoop_conf_dir = '/usr/hdp/2.5.0.0-1235/hadoop/conf',
                               type = 'directory',
                               action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
                               dfs_type = '',
@@ -668,7 +668,7 @@ class TestDruid(RMFTestCase):
     self.assertResourceCalled('HdfsResource', '/user/druid/logs',
                               immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
                               security_enabled = False,
-                              hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+                              hadoop_bin_dir = '/usr/hdp/2.5.0.0-1235/hadoop/bin',
                               keytab = UnknownConfigurationMock(),
                               default_fs = 'hdfs://c6401.ambari.apache.org:8020',
                               hdfs_site = {u'a': u'b'},
@@ -677,7 +677,7 @@ class TestDruid(RMFTestCase):
                               user = 'hdfs',
                               owner = 'druid',
                               group='hadoop',
-                              hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+                              hadoop_conf_dir = '/usr/hdp/2.5.0.0-1235/hadoop/conf',
                               type = 'directory',
                               action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
                               dfs_type = '',

http://git-wip-us.apache.org/repos/asf/ambari/blob/5433e479/ambari-server/src/test/python/stacks/2.6/SPARK2/test_spark_livy2.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.6/SPARK2/test_spark_livy2.py b/ambari-server/src/test/python/stacks/2.6/SPARK2/test_spark_livy2.py
index c370bbc..60e7fd2 100644
--- a/ambari-server/src/test/python/stacks/2.6/SPARK2/test_spark_livy2.py
+++ b/ambari-server/src/test/python/stacks/2.6/SPARK2/test_spark_livy2.py
@@ -56,7 +56,7 @@ class TestSparkClient(RMFTestCase):
         self.assertResourceCalled('HdfsResource', '/user/livy',
                                   immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
                                   security_enabled = False,
-                                  hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+                                  hadoop_bin_dir = '/usr/hdp/2.5.0.0-1235/hadoop/bin',
                                   keytab = UnknownConfigurationMock(),
                                   default_fs = 'hdfs://c6401.ambari.apache.org:8020',
                                   hdfs_site = {u'a': u'b'},
@@ -64,7 +64,7 @@ class TestSparkClient(RMFTestCase):
                                   principal_name = UnknownConfigurationMock(),
                                   user = 'hdfs',
                                   owner = 'livy',
-                                  hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+                                  hadoop_conf_dir = '/usr/hdp/2.5.0.0-1235/hadoop/conf',
                                   type = 'directory',
                                   action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
                                   dfs_type = '',
@@ -73,7 +73,7 @@ class TestSparkClient(RMFTestCase):
         self.assertResourceCalled('HdfsResource', None,
                                   immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
                                   security_enabled = False,
-                                  hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+                                  hadoop_bin_dir = '/usr/hdp/2.5.0.0-1235/hadoop/bin',
                                   keytab = UnknownConfigurationMock(),
                                   default_fs = 'hdfs://c6401.ambari.apache.org:8020',
                                   hdfs_site = {u'a': u'b'},
@@ -82,12 +82,12 @@ class TestSparkClient(RMFTestCase):
                                   user = 'hdfs',
                                   action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
                                   dfs_type = '',
-                                  hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+                                  hadoop_conf_dir = '/usr/hdp/2.5.0.0-1235/hadoop/conf',
                                   )
         self.assertResourceCalled('HdfsResource', '/livy2-recovery',
                                   immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
                                   security_enabled = False,
-                                  hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+                                  hadoop_bin_dir = '/usr/hdp/2.5.0.0-1235/hadoop/bin',
                                   keytab = UnknownConfigurationMock(),
                                   default_fs = 'hdfs://c6401.ambari.apache.org:8020',
                                   hdfs_site = {u'a': u'b'},
@@ -95,7 +95,7 @@ class TestSparkClient(RMFTestCase):
                                   principal_name = UnknownConfigurationMock(),
                                   user = 'hdfs',
                                   owner = 'livy',
-                                  hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+                                  hadoop_conf_dir = '/usr/hdp/2.5.0.0-1235/hadoop/conf',
                                   type = 'directory',
                                   action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
                                   dfs_type = '',
@@ -104,7 +104,7 @@ class TestSparkClient(RMFTestCase):
         self.assertResourceCalled('HdfsResource', None,
                                   immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
                                   security_enabled = False,
-                                  hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+                                  hadoop_bin_dir = '/usr/hdp/2.5.0.0-1235/hadoop/bin',
                                   keytab = UnknownConfigurationMock(),
                                   default_fs = 'hdfs://c6401.ambari.apache.org:8020',
                                   hdfs_site = {u'a': u'b'},
@@ -113,7 +113,7 @@ class TestSparkClient(RMFTestCase):
                                   user = 'hdfs',
                                   action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
                                   dfs_type = '',
-                                  hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+                                  hadoop_conf_dir = '/usr/hdp/2.5.0.0-1235/hadoop/conf',
                                   )
         self.assertResourceCalled('File', '/usr/hdp/current/livy2-server/conf/livy-env.sh',
                                   content = InlineTemplate(self.getConfig()['configurations']['livy2-env']['content']),


[3/3] ambari git commit: AMBARI-22083 - Wrong Hadoop Home Directory Is Being Picked Up on MAINT/PATCH Upgraded Clusters (jonathanhurley)

Posted by jo...@apache.org.
AMBARI-22083 - Wrong Hadoop Home Directory Is Being Picked Up on MAINT/PATCH Upgraded Clusters (jonathanhurley)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/5433e479
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/5433e479
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/5433e479

Branch: refs/heads/branch-2.6
Commit: 5433e479260dc9e13aacae1ea9edb9c29d6b96cb
Parents: 6de11b8
Author: Jonathan Hurley <jh...@hortonworks.com>
Authored: Wed Sep 27 11:52:11 2017 -0400
Committer: Jonathan Hurley <jh...@hortonworks.com>
Committed: Fri Sep 29 11:09:10 2017 -0400

----------------------------------------------------------------------
 .../libraries/functions/component_version.py    |  26 +--
 .../libraries/functions/conf_select.py          |  79 ++--------
 .../libraries/functions/stack_select.py         |  69 ++++----
 .../ambari/server/agent/HeartbeatMonitor.java   |   6 +-
 .../ambari/server/agent/StatusCommand.java      |  27 +++-
 .../package/scripts/hive_server_upgrade.py      |   5 -
 .../0.12.0.2.0/package/scripts/params_linux.py  |   5 +-
 .../0.12.0.2.0/package/scripts/status_params.py |   8 +-
 .../HIVE/0.12.0.2.0/package/scripts/webhcat.py  |   2 +-
 .../4.0/hooks/after-INSTALL/scripts/params.py   |   2 +-
 .../4.0/hooks/before-ANY/scripts/params.py      |   6 +-
 .../4.0/hooks/before-START/scripts/params.py    |   2 +-
 .../4.2.5/hooks/after-INSTALL/scripts/params.py |   2 +-
 .../4.2.5/hooks/before-ANY/scripts/params.py    |   6 +-
 .../4.2.5/hooks/before-START/scripts/params.py  |   2 +-
 .../2.0.6/hooks/after-INSTALL/scripts/params.py |  11 +-
 .../2.0.6/hooks/before-ANY/scripts/params.py    |  53 +++----
 .../before-ANY/scripts/shared_initialization.py |   8 -
 .../2.0.6/hooks/before-START/scripts/params.py  |  17 +-
 .../services/ECS/package/scripts/params.py      |   2 +-
 .../stacks/2.0.6/HBASE/test_hbase_client.py     |   1 -
 .../stacks/2.0.6/HBASE/test_hbase_master.py     |   6 +-
 .../2.0.6/HBASE/test_phoenix_queryserver.py     |   7 +
 .../python/stacks/2.0.6/HDFS/test_datanode.py   |  38 ++---
 .../python/stacks/2.0.6/HDFS/test_namenode.py   |   5 +-
 .../stacks/2.0.6/HIVE/test_hive_metastore.py    |  42 +++--
 .../stacks/2.0.6/HIVE/test_hive_server.py       | 158 ++++++++++---------
 .../2.0.6/HIVE/test_hive_service_check.py       |   4 +-
 .../stacks/2.0.6/HIVE/test_webhcat_server.py    |  41 +++--
 .../stacks/2.0.6/OOZIE/test_oozie_server.py     |   9 ++
 .../2.0.6/OOZIE/test_oozie_service_check.py     |   5 +-
 .../stacks/2.0.6/YARN/test_historyserver.py     |   5 +-
 .../stacks/2.0.6/YARN/test_mapreduce2_client.py |   1 +
 .../hooks/after-INSTALL/test_after_install.py   |  12 +-
 .../2.0.6/hooks/before-ANY/test_before_any.py   |   9 --
 .../stacks/2.1/FALCON/test_falcon_server.py     |  26 +--
 .../stacks/2.1/HIVE/test_hive_metastore.py      |  54 ++++---
 .../stacks/2.2/PIG/test_pig_service_check.py    |  13 ++
 .../stacks/2.2/SPARK/test_job_history_server.py |  18 +--
 .../stacks/2.3/MAHOUT/test_mahout_client.py     |   2 +-
 .../2.3/MAHOUT/test_mahout_service_check.py     |  28 ++--
 .../2.3/SPARK/test_spark_thrift_server.py       |   8 +-
 .../stacks/2.5/RANGER_KMS/test_kms_server.py    |  24 +--
 .../python/stacks/2.5/SPARK/test_spark_livy.py  |  16 +-
 .../stacks/2.5/configs/ranger-kms-secured.json  |   6 +-
 .../test/python/stacks/2.6/DRUID/test_druid.py  |  20 +--
 .../stacks/2.6/SPARK2/test_spark_livy2.py       |  16 +-
 47 files changed, 459 insertions(+), 453 deletions(-)
----------------------------------------------------------------------

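Taken together, the changes below replace the upgrade-state branching in conf_select and stack_select with one rule: resolve the hadoop home/conf directory from the repository version the command itself carries, and fall back to the "current" symlink only when no version is known or the versioned directory is missing. A hedged sketch of the version lookup chain, using the command fields named in the diffs (the sample dictionary is illustrative; real commands are assembled by the server):

    config = {
        "serviceName": "HDFS",
        "role": "DATANODE",
        "componentVersionMap": {"HDFS": {"DATANODE": "2.5.0.0-1235"}},
        "commandParams": {"version": "2.5.0.0-1235"},
    }

    # First try the per-component mapping sent with every command ...
    versions = config["componentVersionMap"]
    version = versions.get(config["serviceName"], {}).get(config["role"])
    # ... then fall back to the command-level version parameter.
    if version is None:
        version = config.get("commandParams", {}).get("version")
    # version == "2.5.0.0-1235"; the hadoop dirs are then built from it.
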

http://git-wip-us.apache.org/repos/asf/ambari/blob/5433e479/ambari-common/src/main/python/resource_management/libraries/functions/component_version.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/component_version.py b/ambari-common/src/main/python/resource_management/libraries/functions/component_version.py
index a1fd6b2..169b339 100644
--- a/ambari-common/src/main/python/resource_management/libraries/functions/component_version.py
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/component_version.py
@@ -20,7 +20,7 @@ limitations under the License.
 
 from resource_management.libraries.script.script import Script
 
-def get_component_repository_version(service_name, component_name = None):
+def get_component_repository_version(service_name = None, component_name = None):
   """
   Gets the version associated with the specified component from the structure in the command.
   Every command should contain a mapping of service/component to the desired repository it's set
@@ -29,11 +29,16 @@ def get_component_repository_version(service_name, component_name = None):
   :service_name: the name of the service
   :component_name: the name of the component
   """
-  versions = _get_component_repositories()
+  config = Script.get_config()
+
+  versions = _get_component_repositories(config)
   if versions is None:
     return None
 
-  if service_name not in versions:
+  if service_name is None:
+    service_name = config['serviceName'] if config is not None and 'serviceName' in config else None
+
+  if service_name is None or service_name not in versions:
     return None
 
   component_versions = versions[service_name]
@@ -41,22 +46,23 @@ def get_component_repository_version(service_name, component_name = None):
     return None
 
   if component_name is None:
-    for component in component_versions:
-      return component_versions[component]
+    component_name = config["role"] if config is not None and "role" in config else None
 
-  if not component_name in component_versions:
-    return None
+  # return a direct match of component name
+  if component_name is not None and component_name in component_versions:
+    return component_versions[component_name]
 
-  return component_versions[component_name]
+  # fall back to the first one for the service
+  return component_versions.values()[0]
 
 
-def _get_component_repositories():
+def _get_component_repositories(config):
   """
   Gets an initialized dictionary from the value in componentVersionMap. This structure is
   sent on every command by Ambari and should contain each service & component's desired repository.
+  :config:  the configuration dictionary
   :return:
   """
-  config = Script.get_config()
   if "componentVersionMap" not in config or config["componentVersionMap"] is "":
     return None
 

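With the new defaults, callers may omit both arguments and the function infers them from the command itself: serviceName for the service and role for the component. Note the other behavioral change above: an unmatched component name now falls back to the first version recorded for the service instead of returning None. A hedged usage sketch:

    from resource_management.libraries.functions import component_version

    # Explicit arguments, as before:
    version = component_version.get_component_repository_version("HDFS", "DATANODE")

    # New: no arguments; the service and component are read from the
    # command's serviceName and role fields.
    version = component_version.get_component_repository_version()
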
http://git-wip-us.apache.org/repos/asf/ambari/blob/5433e479/ambari-common/src/main/python/resource_management/libraries/functions/conf_select.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/conf_select.py b/ambari-common/src/main/python/resource_management/libraries/functions/conf_select.py
index ffcaad5..86821bf 100644
--- a/ambari-common/src/main/python/resource_management/libraries/functions/conf_select.py
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/conf_select.py
@@ -34,9 +34,11 @@ from resource_management.core.logger import Logger
 from resource_management.core.resources.system import Directory
 from resource_management.core.resources.system import Execute
 from resource_management.core.resources.system import Link
+from resource_management.libraries.functions import component_version
 from resource_management.libraries.functions.default import default
 from resource_management.libraries.functions import stack_tools
 from resource_management.core.exceptions import Fail
+from resource_management.core import sudo
 from resource_management.core.shell import as_sudo
 from resource_management.libraries.functions.stack_features import check_stack_feature
 from resource_management.libraries.functions import StackFeature
@@ -215,79 +217,28 @@ def select(stack_name, package, version, try_create=True, ignore_errors=False):
 
 
 
-def get_hadoop_conf_dir(force_latest_on_upgrade=False):
+def get_hadoop_conf_dir():
   """
-  Gets the shared hadoop conf directory using:
-  1.  Start with /etc/hadoop/conf
-  2.  When the stack is greater than HDP-2.2, use <stack-root>/current/hadoop-client/conf
-  3.  Only when doing a RU and HDP-2.3 or higher, use the value as computed
-      by <conf-selector-tool>.  This is in the form <stack-root>/VERSION/hadoop/conf to make sure
-      the configs are written in the correct place. However, if the component itself has
-      not yet been upgraded, it should use the hadoop configs from the prior version.
-      This will perform an <stack-selector-tool> status to determine which version to use.
-  :param force_latest_on_upgrade:  if True, then force the returned path to always
-  be that of the upgrade target version, even if <stack-selector-tool> has not been called. This
-  is primarily used by hooks like before-ANY to ensure that hadoop environment
-  configurations are written to the correct location since they are written out
-  before the <stack-selector-tool>/<conf-selector-tool> would have been called.
+  Return the hadoop shared conf directory which should be used for the command's component. The
+  directory including the component's version is tried first, but if that doesn't exist,
+  this will fallback to using "current".
   """
-  hadoop_conf_dir = "/etc/hadoop/conf"
-  stack_name = None
   stack_root = Script.get_stack_root()
   stack_version = Script.get_stack_version()
-  version = None
 
-  if not Script.in_stack_upgrade():
-    # During normal operation, the HDP stack must be 2.3 or higher
-    if stack_version and check_stack_feature(StackFeature.ROLLING_UPGRADE, stack_version):
-      hadoop_conf_dir = os.path.join(stack_root, "current", "hadoop-client", "conf")
-
-    if stack_version and check_stack_feature(StackFeature.CONFIG_VERSIONING, stack_version):
-      hadoop_conf_dir = os.path.join(stack_root, "current", "hadoop-client", "conf")
-      stack_name = default("/hostLevelParams/stack_name", None)
+  hadoop_conf_dir = os.path.join(os.path.sep, "etc", "hadoop", "conf")
+  if check_stack_feature(StackFeature.CONFIG_VERSIONING, stack_version):
+    # read the desired version from the component map and use that for building the hadoop home
+    version = component_version.get_component_repository_version()
+    if version is None:
       version = default("/commandParams/version", None)
 
-      if not os.path.islink(hadoop_conf_dir) and stack_name and version:
-        version = str(version)
-  else:
-    # The "stack_version" is the desired stack, e.g., 2.2 or 2.3
-    # In an RU, it is always the desired stack, and doesn't change even during the Downgrade!
-    # In an RU Downgrade from HDP 2.3 to 2.2, the first thing we do is
-    # rm /etc/[component]/conf and then mv /etc/[component]/conf.backup /etc/[component]/conf
-    if stack_version and check_stack_feature(StackFeature.ROLLING_UPGRADE, stack_version):
+    hadoop_conf_dir = os.path.join(stack_root, str(version), "hadoop", "conf")
+    if version is None or sudo.path_isdir(hadoop_conf_dir) is False:
       hadoop_conf_dir = os.path.join(stack_root, "current", "hadoop-client", "conf")
 
-      # This contains the "version", including the build number, that is actually used during a stack upgrade and
-      # is the version upgrading/downgrading to.
-      stack_info = stack_select._get_upgrade_stack()
-
-      if stack_info is None:
-        raise Fail("Unable to retrieve the upgrade/downgrade stack information from the request")
-
-      stack_name = stack_info[0]
-      version = stack_info[1]
-
-      Logger.info(
-        "An upgrade/downgrade for {0}-{1} is in progress, determining which hadoop conf dir to use.".format(
-          stack_name, version))
-
-      # This is the version either upgrading or downgrading to.
-      if version and check_stack_feature(StackFeature.CONFIG_VERSIONING, version):
-        # Determine if <stack-selector-tool> has been run and if not, then use the current
-        # hdp version until this component is upgraded.
-        if not force_latest_on_upgrade:
-          current_stack_version = stack_select.get_role_component_current_stack_version()
-          if current_stack_version is not None and version != current_stack_version:
-            version = current_stack_version
-            stack_selector_name = stack_tools.get_stack_tool_name(stack_tools.STACK_SELECTOR_NAME)
-            Logger.info("{0} has not yet been called to update the symlink for this component, "
-                        "keep using version {1}".format(stack_selector_name, current_stack_version))
-
-        # Only change the hadoop_conf_dir path, don't <conf-selector-tool> this older version
-        hadoop_conf_dir = os.path.join(stack_root, version, "hadoop", "conf")
-        Logger.info("Hadoop conf dir: {0}".format(hadoop_conf_dir))
-
-  Logger.info("Using hadoop conf dir: {0}".format(hadoop_conf_dir))
+    Logger.info("Using hadoop conf dir: {0}".format(hadoop_conf_dir))
+
   return hadoop_conf_dir
 
 

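The rewritten get_hadoop_conf_dir reduces to a two-step probe: build the version-qualified path first, then fall back to "current" when the version is unknown or the directory is absent (sudo.path_isdir performs the existence check). It also drops force_latest_on_upgrade from the signature, so any caller still passing it must be updated. A hedged usage sketch:

    from resource_management.libraries.functions import conf_select

    hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
    # e.g. "/usr/hdp/2.5.0.0-1235/hadoop/conf" when the command carries that
    # version and the directory exists on disk, otherwise
    # "/usr/hdp/current/hadoop-client/conf"
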
http://git-wip-us.apache.org/repos/asf/ambari/blob/5433e479/ambari-common/src/main/python/resource_management/libraries/functions/stack_select.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/stack_select.py b/ambari-common/src/main/python/resource_management/libraries/functions/stack_select.py
index f5068e4..d8f3d37 100644
--- a/ambari-common/src/main/python/resource_management/libraries/functions/stack_select.py
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/stack_select.py
@@ -28,16 +28,18 @@ import ambari_simplejson as json
 from resource_management.core.logger import Logger
 from resource_management.core.exceptions import Fail
 from resource_management.core.resources.system import Execute
+from resource_management.libraries.functions import component_version
 from resource_management.libraries.functions.default import default
 from resource_management.libraries.functions.get_stack_version import get_stack_version
 from resource_management.libraries.functions.format import format
 from resource_management.libraries.script.script import Script
 from resource_management.libraries.functions import stack_tools
 from resource_management.core import shell
+from resource_management.core import sudo
 from resource_management.core.shell import call
 from resource_management.libraries.functions.version import format_stack_version
 from resource_management.libraries.functions.version_select_util import get_versions_from_stack_root
-from resource_management.libraries.functions.stack_features import check_stack_feature
+from resource_management.libraries.functions import stack_features
 from resource_management.libraries.functions import StackFeature
 from resource_management.libraries.functions import upgrade_summary
 
@@ -352,17 +354,13 @@ def get_role_component_current_stack_version():
   return current_stack_version
 
 
-def get_hadoop_dir(target, force_latest_on_upgrade=False):
+def get_hadoop_dir(target):
   """
-  Return the hadoop shared directory in the following override order
-  1. Use default for 2.1 and lower
-  2. If 2.2 and higher, use <stack-root>/current/hadoop-client/{target}
-  3. If 2.2 and higher AND for an upgrade, use <stack-root>/<version>/hadoop/{target}.
-  However, if the upgrade has not yet invoked <stack-selector-tool>, return the current
-  version of the component.
+  Return the hadoop shared directory which should be used for the command's component. The
+  directory including the component's version is tried first, but if that doesn't exist,
+  this will fallback to using "current".
+
   :target: the target directory
-  :force_latest_on_upgrade: if True, then this will return the "current" directory
-  without the stack version built into the path, such as <stack-root>/current/hadoop-client
   """
   stack_root = Script.get_stack_root()
   stack_version = Script.get_stack_version()
@@ -373,35 +371,26 @@ def get_hadoop_dir(target, force_latest_on_upgrade=False):
   hadoop_dir = HADOOP_DIR_DEFAULTS[target]
 
   formatted_stack_version = format_stack_version(stack_version)
-  if formatted_stack_version and  check_stack_feature(StackFeature.ROLLING_UPGRADE, formatted_stack_version):
+
+  if stack_features.check_stack_feature(StackFeature.ROLLING_UPGRADE, formatted_stack_version):
+    # read the desired version from the component map and use that for building the hadoop home
+    version = component_version.get_component_repository_version()
+    if version is None:
+      version = default("/commandParams/version", None)
+
     # home uses a different template
     if target == "home":
-      hadoop_dir = HADOOP_HOME_DIR_TEMPLATE.format(stack_root, "current", "hadoop-client")
+      hadoop_dir = HADOOP_HOME_DIR_TEMPLATE.format(stack_root, version, "hadoop")
+      if version is None or sudo.path_isdir(hadoop_dir) is False:
+        hadoop_dir = HADOOP_HOME_DIR_TEMPLATE.format(stack_root, "current", "hadoop-client")
     else:
-      hadoop_dir = HADOOP_DIR_TEMPLATE.format(stack_root, "current", "hadoop-client", target)
-
-    # if we are not forcing "current" for HDP 2.2, then attempt to determine
-    # if the exact version needs to be returned in the directory
-    if not force_latest_on_upgrade:
-      stack_info = _get_upgrade_stack()
-
-      if stack_info is not None:
-        stack_version = stack_info[1]
-
-        # determine if <stack-selector-tool> has been run and if not, then use the current
-        # hdp version until this component is upgraded
-        current_stack_version = get_role_component_current_stack_version()
-        if current_stack_version is not None and stack_version != current_stack_version:
-          stack_version = current_stack_version
-
-        if target == "home":
-          # home uses a different template
-          hadoop_dir = HADOOP_HOME_DIR_TEMPLATE.format(stack_root, stack_version, "hadoop")
-        else:
-          hadoop_dir = HADOOP_DIR_TEMPLATE.format(stack_root, stack_version, "hadoop", target)
+      hadoop_dir = HADOOP_DIR_TEMPLATE.format(stack_root, version, "hadoop", target)
+      if version is None or sudo.path_isdir(hadoop_dir) is False:
+        hadoop_dir = HADOOP_DIR_TEMPLATE.format(stack_root, "current", "hadoop-client", target)
 
   return hadoop_dir
 
+
 def get_hadoop_dir_for_stack_version(target, stack_version):
   """
   Return the hadoop shared directory for the provided stack version. This is necessary
@@ -414,15 +403,11 @@ def get_hadoop_dir_for_stack_version(target, stack_version):
   if not target in HADOOP_DIR_DEFAULTS:
     raise Fail("Target {0} not defined".format(target))
 
-  hadoop_dir = HADOOP_DIR_DEFAULTS[target]
-
-  formatted_stack_version = format_stack_version(stack_version)
-  if formatted_stack_version and  check_stack_feature(StackFeature.ROLLING_UPGRADE, formatted_stack_version):
-    # home uses a different template
-    if target == "home":
-      hadoop_dir = HADOOP_HOME_DIR_TEMPLATE.format(stack_root, stack_version, "hadoop")
-    else:
-      hadoop_dir = HADOOP_DIR_TEMPLATE.format(stack_root, stack_version, "hadoop", target)
+  # home uses a different template
+  if target == "home":
+    hadoop_dir = HADOOP_HOME_DIR_TEMPLATE.format(stack_root, stack_version, "hadoop")
+  else:
+    hadoop_dir = HADOOP_DIR_TEMPLATE.format(stack_root, stack_version, "hadoop", target)
 
   return hadoop_dir
 

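get_hadoop_dir follows the same pattern and likewise drops force_latest_on_upgrade from its signature, so callers that passed it (see the hook and params scripts in the diffstat) need updating. A hedged usage sketch:

    from resource_management.libraries.functions import stack_select

    hadoop_home = stack_select.get_hadoop_dir("home")
    # e.g. "/usr/hdp/2.5.0.0-1235/hadoop" when the command's version is known
    # and that directory exists, else "/usr/hdp/current/hadoop-client"

    hadoop_bin = stack_select.get_hadoop_dir("bin")
    # e.g. "/usr/hdp/2.5.0.0-1235/hadoop/bin", falling back to
    # "/usr/hdp/current/hadoop-client/bin"
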
http://git-wip-us.apache.org/repos/asf/ambari/blob/5433e479/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartbeatMonitor.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartbeatMonitor.java b/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartbeatMonitor.java
index 0042f53..a77ed75 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartbeatMonitor.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartbeatMonitor.java
@@ -210,7 +210,7 @@ public class HeartbeatMonitor implements Runnable {
    * @return list of commands to get status of service components on a concrete host
    */
   public List<StatusCommand> generateStatusCommands(String hostname) throws AmbariException {
-    List<StatusCommand> cmds = new ArrayList<StatusCommand>();
+    List<StatusCommand> cmds = new ArrayList<>();
 
     for (Cluster cl : clusters.getClustersForHost(hostname)) {
       Map<String, DesiredConfig> desiredConfigs = cl.getDesiredConfigs();
@@ -249,8 +249,8 @@ public class HeartbeatMonitor implements Runnable {
     StackInfo stackInfo = ambariMetaInfo.getStack(stackId.getStackName(),
         stackId.getStackVersion());
 
-    Map<String, Map<String, String>> configurations = new TreeMap<String, Map<String, String>>();
-    Map<String, Map<String,  Map<String, String>>> configurationAttributes = new TreeMap<String, Map<String, Map<String, String>>>();
+    Map<String, Map<String, String>> configurations = new TreeMap<>();
+    Map<String, Map<String,  Map<String, String>>> configurationAttributes = new TreeMap<>();
 
     // get the cluster config for type '*-env'
     // apply config group overrides

http://git-wip-us.apache.org/repos/asf/ambari/blob/5433e479/ambari-server/src/main/java/org/apache/ambari/server/agent/StatusCommand.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/agent/StatusCommand.java b/ambari-server/src/main/java/org/apache/ambari/server/agent/StatusCommand.java
index 5dec53c..133da0b 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/agent/StatusCommand.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/agent/StatusCommand.java
@@ -17,12 +17,13 @@
  */
 package org.apache.ambari.server.agent;
 
-import com.google.gson.annotations.SerializedName;
-import org.apache.ambari.server.state.State;
-
 import java.util.HashMap;
 import java.util.Map;
 
+import org.apache.ambari.server.state.State;
+
+import com.google.gson.annotations.SerializedName;
+
 /**
  * Command to report the status of a list of services in roles.
  */
@@ -38,6 +39,9 @@ public class StatusCommand extends AgentCommand {
   @SerializedName("serviceName")
   private String serviceName;
 
+  @SerializedName("role")
+  private String role;
+
   @SerializedName("componentName")
   private String componentName;
 
@@ -48,10 +52,10 @@ public class StatusCommand extends AgentCommand {
   private Map<String, Map<String, Map<String, String>>> configurationAttributes;
 
   @SerializedName("commandParams")
-  private Map<String, String> commandParams = new HashMap<String, String>();
+  private Map<String, String> commandParams = new HashMap<>();
 
   @SerializedName("hostLevelParams")
-  private Map<String, String> hostLevelParams = new HashMap<String, String>();
+  private Map<String, String> hostLevelParams = new HashMap<>();
 
   @SerializedName("hostname")
   private String hostname = null;
@@ -120,8 +124,17 @@ public class StatusCommand extends AgentCommand {
     return componentName;
   }
 
+  /**
+   * Sets both the {@code componentName} and the {@code role}. Status commands
+   * use the {@code componentName}, while execution commands use the
+   * {@code role}. It's simpler for the Python to just worry about {@code role},
+   * so this ensures that both are set.
+   *
+   * @param componentName
+   */
   public void setComponentName(String componentName) {
     this.componentName = componentName;
+    role = componentName;
   }
 
   public Map<String, Map<String, String>> getConfigurations() {
@@ -164,6 +177,10 @@ public class StatusCommand extends AgentCommand {
     return hostname;
   }
 
+  public String getRole() {
+    return role;
+  }
+
   public enum StatusCommandPayload {
     // The minimal payload for status, agent adds necessary details
     MINIMAL,

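Because setComponentName now mirrors its value into role, status commands and execution commands look the same to the agent's Python code, which is exactly what the new component_version defaults rely on. A hedged sketch of the consuming side, mirroring the lookup added in component_version.py above:

    # Both command types now carry the component under "role":
    component_name = config["role"] if config is not None and "role" in config else None
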
http://git-wip-us.apache.org/repos/asf/ambari/blob/5433e479/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_upgrade.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_upgrade.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_upgrade.py
index 12c9e1c..1cb95ff 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_upgrade.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_upgrade.py
@@ -58,11 +58,6 @@ def deregister():
   if current_hiveserver_version is None:
     raise Fail('Unable to determine the current HiveServer2 version to deregister.')
 
-  # fallback when upgrading because <stack-root>/current/hive-server2/conf/conf.server may not exist
-  hive_server_conf_dir = params.hive_server_conf_dir
-  if not os.path.exists(hive_server_conf_dir):
-    hive_server_conf_dir = "/etc/hive/conf.server"
-
   # deregister
   hive_execute_path = params.execute_path
   # If upgrading, the upgrade-target hive binary should be used to call the --deregister command.

http://git-wip-us.apache.org/repos/asf/ambari/blob/5433e479/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
index 77e1bed..bcc1826 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
@@ -17,7 +17,6 @@ See the License for the specific language governing permissions and
 limitations under the License.
 
 """
-
 import status_params
 import ambari_simplejson as json # simplejson is much faster comparing to Python 2.6 json module and has the same functions set.
 import os
@@ -36,6 +35,7 @@ from resource_management.libraries.functions import get_kinit_path
 from resource_management.libraries.functions.get_not_managed_resources import get_not_managed_resources
 from resource_management.libraries.script.script import Script
 from resource_management.libraries.functions import StackFeature
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.stack_features import check_stack_feature
 from resource_management.libraries.functions.stack_features import get_stack_feature_version
 from resource_management.libraries.functions.get_port_from_url import get_port_from_url
@@ -109,7 +109,8 @@ stack_supports_hive_interactive_ga = check_stack_feature(StackFeature.HIVE_INTER
 component_directory = status_params.component_directory
 component_directory_interactive = status_params.component_directory_interactive
 
-hadoop_home = format('{stack_root}/current/hadoop-client')
+hadoop_home = stack_select.get_hadoop_dir("home")
+
 hive_bin = format('{stack_root}/current/{component_directory}/bin')
 hive_schematool_ver_bin = format('{stack_root}/{version}/hive/bin')
 hive_schematool_bin = format('{stack_root}/current/{component_directory}/bin')

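Hive's hadoop_home is now resolved through the shared helper instead of a hard-coded format string, so it picks up the version-qualified directory on MAINT/PATCH-upgraded clusters automatically. Side by side (the old line is the one removed above):

    # before: always the symlink, which is wrong after a MAINT/PATCH upgrade
    hadoop_home = format('{stack_root}/current/hadoop-client')

    # after: version-aware, with a fallback to "current"
    hadoop_home = stack_select.get_hadoop_dir("home")
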
http://git-wip-us.apache.org/repos/asf/ambari/blob/5433e479/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/status_params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/status_params.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/status_params.py
index f5b00ac..3a3e3f0 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/status_params.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/status_params.py
@@ -25,7 +25,7 @@ from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions import format
 from resource_management.libraries.functions import StackFeature
-from resource_management.libraries.functions.stack_features import check_stack_feature
+from resource_management.libraries.functions import stack_features
 from resource_management.libraries.functions.version import format_stack_version
 from resource_management.libraries.functions.default import default
 from resource_management.libraries.functions import get_kinit_path
@@ -103,17 +103,17 @@ else:
   hive_conf_dir = format("{stack_root}/current/{component_directory}/conf")
   hive_client_conf_dir = format("{stack_root}/current/{component_directory}/conf")
 
-  if check_stack_feature(StackFeature.CONFIG_VERSIONING, stack_version_formatted_major):
+  if stack_features.check_stack_feature(StackFeature.CONFIG_VERSIONING, stack_version_formatted_major):
     hive_server_conf_dir = format("{stack_root}/current/{component_directory}/conf/conf.server")
     hive_conf_dir = hive_server_conf_dir
 
-  if check_stack_feature(StackFeature.HIVE_WEBHCAT_SPECIFIC_CONFIGS, stack_version_formatted_major):
+  if stack_features.check_stack_feature(StackFeature.HIVE_WEBHCAT_SPECIFIC_CONFIGS, stack_version_formatted_major):
     # this is NOT a typo. Configs for hcatalog/webhcat point to a
     # specific directory which is NOT called 'conf'
     webhcat_conf_dir = format("{stack_root}/current/hive-webhcat/etc/webhcat")
 
   # if stack version supports hive serve interactive
-  if check_stack_feature(StackFeature.HIVE_SERVER_INTERACTIVE, stack_version_formatted_major):
+  if stack_features.check_stack_feature(StackFeature.HIVE_SERVER_INTERACTIVE, stack_version_formatted_major):
     hive_server_interactive_conf_dir = format("{stack_root}/current/{component_directory_interactive}/conf/conf.server")
 
   hive_config_dir = hive_client_conf_dir
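
The import switch here is subtle but deliberate: with "from ... import check_stack_feature", a test that patches stack_features.check_stack_feature never reaches the copy already bound in this module, whereas calling stack_features.check_stack_feature(...) looks the attribute up at call time and therefore honors the patch (the test diffs later in this commit do exactly that with @patch.object). A standalone demonstration of the difference:

import types
from unittest.mock import patch

helper = types.ModuleType("helper")
helper.check = lambda: "real"

check = helper.check  # equivalent of "from helper import check"

with patch.object(helper, "check", return_value="mocked"):
  print(check())         # real   -- the early binding never sees the patch
  print(helper.check())  # mocked -- attribute lookup resolves at call time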

http://git-wip-us.apache.org/repos/asf/ambari/blob/5433e479/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat.py
index 7f5eff6..e2036e5 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat.py
@@ -82,7 +82,7 @@ def webhcat():
             )
 
   # if we're in an upgrade of a secure cluster, make sure hive-site and yarn-site are created
-  if params.stack_version_formatted_major  and check_stack_feature(StackFeature.CONFIG_VERSIONING, params.stack_version_formatted_major) and \
+  if check_stack_feature(StackFeature.CONFIG_VERSIONING, params.stack_version_formatted_major) and \
        params.version and params.stack_root:
     XmlConfig("hive-site.xml",
       conf_dir = format("{stack_root}/{version}/hive/conf"),

http://git-wip-us.apache.org/repos/asf/ambari/blob/5433e479/ambari-server/src/main/resources/stacks/BigInsights/4.0/hooks/after-INSTALL/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.0/hooks/after-INSTALL/scripts/params.py b/ambari-server/src/main/resources/stacks/BigInsights/4.0/hooks/after-INSTALL/scripts/params.py
index d3332db..f5c716b 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.0/hooks/after-INSTALL/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.0/hooks/after-INSTALL/scripts/params.py
@@ -85,4 +85,4 @@ namenode_host = default("/clusterHostInfo/namenode_host", [])
 has_namenode = not len(namenode_host) == 0
 
 if has_namenode:
-  hadoop_conf_dir = conf_select.get_hadoop_conf_dir(force_latest_on_upgrade=True)
+  hadoop_conf_dir = conf_select.get_hadoop_conf_dir()

http://git-wip-us.apache.org/repos/asf/ambari/blob/5433e479/ambari-server/src/main/resources/stacks/BigInsights/4.0/hooks/before-ANY/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.0/hooks/before-ANY/scripts/params.py b/ambari-server/src/main/resources/stacks/BigInsights/4.0/hooks/before-ANY/scripts/params.py
index 5ffd28c..91212bd 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.0/hooks/before-ANY/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.0/hooks/before-ANY/scripts/params.py
@@ -95,8 +95,8 @@ mapreduce_libs_path = "/usr/lib/hadoop-mapreduce/*"
 # upgrades would cause these directories to have a version instead of "current"
 # which would cause a lot of problems when writing out hadoop-env.sh; instead
 # force the use of "current" in the hook
-hadoop_home = stack_select.get_hadoop_dir("home", force_latest_on_upgrade=True)
-hadoop_libexec_dir = stack_select.get_hadoop_dir("libexec", force_latest_on_upgrade=True)
+hadoop_home = stack_select.get_hadoop_dir("home")
+hadoop_libexec_dir = stack_select.get_hadoop_dir("libexec")
 
 hadoop_conf_empty_dir = "/etc/hadoop/conf.empty"
 hadoop_secure_dn_user = hdfs_user
@@ -186,7 +186,7 @@ has_ranger_admin = not len(ranger_admin_hosts) == 0
 
 
 if has_namenode:
-  hadoop_conf_dir = conf_select.get_hadoop_conf_dir(force_latest_on_upgrade=True)
+  hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
 
 hbase_tmp_dir = "/tmp/hbase-hbase"
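
This pair of hunks is the heart of the patch and repeats through every hook below: stack_select.get_hadoop_dir and conf_select.get_hadoop_conf_dir lose the force_latest_on_upgrade=True argument, so mid-upgrade the hooks stop insisting on the newest ("current") layout and use whatever version is actually selected. A runnable reconstruction of what the removed flag did, under the stated assumption that it simply discarded the selected version while an upgrade was in flight:

def get_hadoop_dir(target, selected_version=None, upgrading=False,
                   force_latest_on_upgrade=False):
  # Hypothetical semantics of the removed flag: during an upgrade, throw
  # away the component's real selected version and answer with the
  # "current" symlink instead.
  if force_latest_on_upgrade and upgrading:
    selected_version = None
  base = ("/usr/hdp/%s/hadoop" % selected_version if selected_version
          else "/usr/hdp/current/hadoop-client")
  return base if target == "home" else "%s/%s" % (base, target)

# On a MAINT/PATCH-upgraded cluster the forced answer hides the real layout:
print(get_hadoop_dir("home", "2.1.0.0-1234", upgrading=True,
                     force_latest_on_upgrade=True))  # /usr/hdp/current/hadoop-client
print(get_hadoop_dir("home", "2.1.0.0-1234", upgrading=True))  # /usr/hdp/2.1.0.0-1234/hadoop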
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/5433e479/ambari-server/src/main/resources/stacks/BigInsights/4.0/hooks/before-START/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.0/hooks/before-START/scripts/params.py b/ambari-server/src/main/resources/stacks/BigInsights/4.0/hooks/before-START/scripts/params.py
index 5c84a05..d72868a 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.0/hooks/before-START/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.0/hooks/before-START/scripts/params.py
@@ -120,7 +120,7 @@ metrics_collection_period = default("/configurations/ams-site/timeline.metrics.s
 
 if has_namenode:
   hadoop_tmp_dir = format("/tmp/hadoop-{hdfs_user}")
-  hadoop_conf_dir = conf_select.get_hadoop_conf_dir(force_latest_on_upgrade=True)
+  hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
   task_log4j_properties_location = os.path.join(hadoop_conf_dir, "task-log4j.properties")
   
 hadoop_pid_dir_prefix = config['configurations']['hadoop-env']['hadoop_pid_dir_prefix']

http://git-wip-us.apache.org/repos/asf/ambari/blob/5433e479/ambari-server/src/main/resources/stacks/BigInsights/4.2.5/hooks/after-INSTALL/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2.5/hooks/after-INSTALL/scripts/params.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2.5/hooks/after-INSTALL/scripts/params.py
index c497054..0de8fe7 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2.5/hooks/after-INSTALL/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.2.5/hooks/after-INSTALL/scripts/params.py
@@ -90,7 +90,7 @@ namenode_host = default("/clusterHostInfo/namenode_host", [])
 has_namenode = not len(namenode_host) == 0
 
 if has_namenode or dfs_type == 'HCFS':
-  hadoop_conf_dir = conf_select.get_hadoop_conf_dir(force_latest_on_upgrade=True)
+  hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
 
 link_configs_lock_file = os.path.join(tmp_dir, "link_configs_lock_file")
 stack_select_lock_file = os.path.join(tmp_dir, "stack_select_lock_file")

http://git-wip-us.apache.org/repos/asf/ambari/blob/5433e479/ambari-server/src/main/resources/stacks/BigInsights/4.2.5/hooks/before-ANY/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2.5/hooks/before-ANY/scripts/params.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2.5/hooks/before-ANY/scripts/params.py
index b0467a9..1ed6d4d 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2.5/hooks/before-ANY/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.2.5/hooks/before-ANY/scripts/params.py
@@ -98,8 +98,8 @@ mapreduce_libs_path = "/usr/lib/hadoop-mapreduce/*"
 # upgrades would cause these directories to have a version instead of "current"
 # which would cause a lot of problems when writing out hadoop-env.sh; instead
 # force the use of "current" in the hook
-hadoop_home = stack_select.get_hadoop_dir("home", force_latest_on_upgrade=True)
-hadoop_libexec_dir = stack_select.get_hadoop_dir("libexec", force_latest_on_upgrade=True)
+hadoop_home = stack_select.get_hadoop_dir("home")
+hadoop_libexec_dir = stack_select.get_hadoop_dir("libexec")
 
 hadoop_conf_empty_dir = "/etc/hadoop/conf.empty"
 hadoop_secure_dn_user = hdfs_user
@@ -187,7 +187,7 @@ has_falcon_server_hosts = not len(falcon_server_hosts) == 0
 has_ranger_admin = not len(ranger_admin_hosts) == 0
 
 if has_namenode or dfs_type == 'HCFS':
-  hadoop_conf_dir = conf_select.get_hadoop_conf_dir(force_latest_on_upgrade=True)
+  hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
 
 hbase_tmp_dir = "/tmp/hbase-hbase"
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/5433e479/ambari-server/src/main/resources/stacks/BigInsights/4.2.5/hooks/before-START/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2.5/hooks/before-START/scripts/params.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2.5/hooks/before-START/scripts/params.py
index be9db58..615fcff 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2.5/hooks/before-START/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.2.5/hooks/before-START/scripts/params.py
@@ -139,7 +139,7 @@ metrics_collection_period = default("/configurations/ams-site/timeline.metrics.s
 
 if has_namenode or dfs_type == 'HCFS':
   hadoop_tmp_dir = format("/tmp/hadoop-{hdfs_user}")
-  hadoop_conf_dir = conf_select.get_hadoop_conf_dir(force_latest_on_upgrade=True)
+  hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
   task_log4j_properties_location = os.path.join(hadoop_conf_dir, "task-log4j.properties")
 
 hadoop_pid_dir_prefix = config['configurations']['hadoop-env']['hadoop_pid_dir_prefix']

http://git-wip-us.apache.org/repos/asf/ambari/blob/5433e479/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/params.py
index 4d7eaee..29a74e7 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/params.py
@@ -43,16 +43,9 @@ stack_version_formatted = format_stack_version(stack_version_unformatted)
 major_stack_version = get_major_version(stack_version_formatted)
 
 # default hadoop params
-mapreduce_libs_path = "/usr/lib/hadoop-mapreduce/*"
 hadoop_libexec_dir = stack_select.get_hadoop_dir("libexec")
-hadoop_conf_empty_dir = "/etc/hadoop/conf.empty"
 
-# HDP 2.2+ params
-if Script.is_stack_greater_or_equal("2.2"):
-  mapreduce_libs_path = "/usr/hdp/current/hadoop-mapreduce-client/*"
-
-  # not supported in HDP 2.2+
-  hadoop_conf_empty_dir = None
+mapreduce_libs_path = "/usr/hdp/current/hadoop-mapreduce-client/*"
 
 versioned_stack_root = '/usr/hdp/current'
 
@@ -93,7 +86,7 @@ namenode_host = default("/clusterHostInfo/namenode_host", [])
 has_namenode = not len(namenode_host) == 0
 
 if has_namenode or dfs_type == 'HCFS':
-  hadoop_conf_dir = conf_select.get_hadoop_conf_dir(force_latest_on_upgrade=True)
+  hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
 
 link_configs_lock_file = get_config_lock_file()
 stack_select_lock_file = os.path.join(tmp_dir, "stack_select_lock_file")

http://git-wip-us.apache.org/repos/asf/ambari/blob/5433e479/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py
index e085225..8ad0d51 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py
@@ -101,49 +101,38 @@ def is_secure_port(port):
   else:
     return False
 
-# hadoop default params
-mapreduce_libs_path = "/usr/lib/hadoop-mapreduce/*"
-
 # upgrades would cause these directories to have a version instead of "current"
 # which would cause a lot of problems when writing out hadoop-env.sh; instead
 # force the use of "current" in the hook
 hdfs_user_nofile_limit = default("/configurations/hadoop-env/hdfs_user_nofile_limit", "128000")
-hadoop_home = stack_select.get_hadoop_dir("home", force_latest_on_upgrade=True)
-hadoop_libexec_dir = stack_select.get_hadoop_dir("libexec", force_latest_on_upgrade=True)
+hadoop_home = stack_select.get_hadoop_dir("home")
+hadoop_libexec_dir = stack_select.get_hadoop_dir("libexec")
 hadoop_lib_home = stack_select.get_hadoop_dir("lib")
 
-hadoop_conf_empty_dir = "/etc/hadoop/conf.empty"
-hadoop_secure_dn_user = hdfs_user
 hadoop_dir = "/etc/hadoop"
-versioned_stack_root = '/usr/hdp/current'
 hadoop_java_io_tmpdir = os.path.join(tmp_dir, "hadoop_java_io_tmpdir")
 datanode_max_locked_memory = config['configurations']['hdfs-site']['dfs.datanode.max.locked.memory']
 is_datanode_max_locked_memory_set = not is_empty(config['configurations']['hdfs-site']['dfs.datanode.max.locked.memory'])
 
-# HDP 2.2+ params
-if Script.is_stack_greater_or_equal("2.2"):
-  mapreduce_libs_path = "/usr/hdp/current/hadoop-mapreduce-client/*"
-
-  # not supported in HDP 2.2+
-  hadoop_conf_empty_dir = None
-
-  if not security_enabled:
-    hadoop_secure_dn_user = '""'
+mapreduce_libs_path = "/usr/hdp/current/hadoop-mapreduce-client/*"
+
+if not security_enabled:
+  hadoop_secure_dn_user = '""'
+else:
+  dfs_dn_port = get_port(dfs_dn_addr)
+  dfs_dn_http_port = get_port(dfs_dn_http_addr)
+  dfs_dn_https_port = get_port(dfs_dn_https_addr)
+  # We try to avoid inability to start datanode as a plain user due to usage of root-owned ports
+  if dfs_http_policy == "HTTPS_ONLY":
+    secure_dn_ports_are_in_use = is_secure_port(dfs_dn_port) or is_secure_port(dfs_dn_https_port)
+  elif dfs_http_policy == "HTTP_AND_HTTPS":
+    secure_dn_ports_are_in_use = is_secure_port(dfs_dn_port) or is_secure_port(dfs_dn_http_port) or is_secure_port(dfs_dn_https_port)
+  else:   # params.dfs_http_policy == "HTTP_ONLY" or not defined:
+    secure_dn_ports_are_in_use = is_secure_port(dfs_dn_port) or is_secure_port(dfs_dn_http_port)
+  if secure_dn_ports_are_in_use:
+    hadoop_secure_dn_user = hdfs_user
   else:
-    dfs_dn_port = get_port(dfs_dn_addr)
-    dfs_dn_http_port = get_port(dfs_dn_http_addr)
-    dfs_dn_https_port = get_port(dfs_dn_https_addr)
-    # We try to avoid inability to start datanode as a plain user due to usage of root-owned ports
-    if dfs_http_policy == "HTTPS_ONLY":
-      secure_dn_ports_are_in_use = is_secure_port(dfs_dn_port) or is_secure_port(dfs_dn_https_port)
-    elif dfs_http_policy == "HTTP_AND_HTTPS":
-      secure_dn_ports_are_in_use = is_secure_port(dfs_dn_port) or is_secure_port(dfs_dn_http_port) or is_secure_port(dfs_dn_https_port)
-    else:   # params.dfs_http_policy == "HTTP_ONLY" or not defined:
-      secure_dn_ports_are_in_use = is_secure_port(dfs_dn_port) or is_secure_port(dfs_dn_http_port)
-    if secure_dn_ports_are_in_use:
-      hadoop_secure_dn_user = hdfs_user
-    else:
-      hadoop_secure_dn_user = '""'
+    hadoop_secure_dn_user = '""'
 
 #hadoop params
 hdfs_log_dir_prefix = config['configurations']['hadoop-env']['hdfs_log_dir_prefix']
@@ -219,7 +208,7 @@ if dfs_ha_namenode_ids:
     dfs_ha_enabled = True
 
 if has_namenode or dfs_type == 'HCFS':
-    hadoop_conf_dir = conf_select.get_hadoop_conf_dir(force_latest_on_upgrade=True)
+    hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
     hadoop_conf_secure_dir = os.path.join(hadoop_conf_dir, "secure")
 
 hbase_tmp_dir = "/tmp/hbase-hbase"
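
The datanode-user block survives the patch intact, only de-indented out of the old HDP 2.2 guard; it decides whether the DataNode needs the secure (root-launched) startup path by checking for root-owned ports. A runnable distillation, assuming is_secure_port flags privileged ports below 1024, consistent with the comment about root-owned ports:

def is_secure_port(port):
  return port is not None and port < 1024

def secure_dn_ports_in_use(dfs_http_policy, dn_port, http_port, https_port):
  # Which ports matter depends on the HTTP policy; any privileged port
  # forces hadoop_secure_dn_user to be set to the real hdfs user.
  if dfs_http_policy == "HTTPS_ONLY":
    candidates = (dn_port, https_port)
  elif dfs_http_policy == "HTTP_AND_HTTPS":
    candidates = (dn_port, http_port, https_port)
  else:  # HTTP_ONLY or not defined
    candidates = (dn_port, http_port)
  return any(is_secure_port(p) for p in candidates)

print(secure_dn_ports_in_use("HTTP_ONLY", 1019, 1022, None))     # True
print(secure_dn_ports_in_use("HTTPS_ONLY", 50010, None, 50475))  # False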

http://git-wip-us.apache.org/repos/asf/ambari/blob/5433e479/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/shared_initialization.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/shared_initialization.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/shared_initialization.py
index 3997117..3dfffdd 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/shared_initialization.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/shared_initialization.py
@@ -189,14 +189,6 @@ def setup_hadoop_env():
     # create /etc/hadoop
     Directory(params.hadoop_dir, mode=0755)
 
-    # HDP < 2.2 used a conf -> conf.empty symlink for /etc/hadoop/
-    if Script.is_stack_less_than("2.2"):
-      Directory(params.hadoop_conf_empty_dir, create_parents = True, owner="root",
-        group=params.user_group )
-
-      Link(params.hadoop_conf_dir, to=params.hadoop_conf_empty_dir,
-         not_if=format("ls {hadoop_conf_dir}"))
-
     # write out hadoop-env.sh, but only if the directory exists
     if os.path.exists(params.hadoop_conf_dir):
       File(os.path.join(params.hadoop_conf_dir, 'hadoop-env.sh'), owner=tc_owner,
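
The deleted block is a leftover from HDP < 2.2, where /etc/hadoop/conf was expected to be a symlink into conf.empty. Stripped of the resource_management wrappers, the removed behavior amounts to roughly this (ownership handling omitted):

import os

def link_conf_to_empty(conf_dir="/etc/hadoop/conf",
                       empty_dir="/etc/hadoop/conf.empty"):
  # Ensure conf.empty exists (the Directory resource with create_parents),
  # then point conf at it unless conf is already there (the not_if "ls"
  # guard on the Link resource).
  if not os.path.isdir(empty_dir):
    os.makedirs(empty_dir)
  if not os.path.exists(conf_dir):
    os.symlink(empty_dir, conf_dir)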

http://git-wip-us.apache.org/repos/asf/ambari/blob/5433e479/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py
index 5ca2d94..55a6093 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py
@@ -68,20 +68,13 @@ hadoop_metrics2_properties_content = None
 if 'hadoop-metrics2.properties' in config['configurations']:
   hadoop_metrics2_properties_content = config['configurations']['hadoop-metrics2.properties']['content']
 
-# hadoop default params
-mapreduce_libs_path = "/usr/lib/hadoop-mapreduce/*"
-
 hadoop_libexec_dir = stack_select.get_hadoop_dir("libexec")
 hadoop_lib_home = stack_select.get_hadoop_dir("lib")
 hadoop_bin = stack_select.get_hadoop_dir("sbin")
-hadoop_home = '/usr'
-create_lib_snappy_symlinks = True
-
-# HDP 2.2+ params
-if Script.is_stack_greater_or_equal("2.2"):
-  mapreduce_libs_path = "/usr/hdp/current/hadoop-mapreduce-client/*"
-  hadoop_home = stack_select.get_hadoop_dir("home")
-  create_lib_snappy_symlinks = False
+
+mapreduce_libs_path = "/usr/hdp/current/hadoop-mapreduce-client/*"
+hadoop_home = stack_select.get_hadoop_dir("home")
+create_lib_snappy_symlinks = False
   
 current_service = config['serviceName']
 
@@ -189,7 +182,7 @@ if has_zk_host:
 
 if has_namenode or dfs_type == 'HCFS':
   hadoop_tmp_dir = format("/tmp/hadoop-{hdfs_user}")
-  hadoop_conf_dir = conf_select.get_hadoop_conf_dir(force_latest_on_upgrade=True)
+  hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
   task_log4j_properties_location = os.path.join(hadoop_conf_dir, "task-log4j.properties")
 
 hadoop_pid_dir_prefix = config['configurations']['hadoop-env']['hadoop_pid_dir_prefix']

http://git-wip-us.apache.org/repos/asf/ambari/blob/5433e479/ambari-server/src/main/resources/stacks/HDP/2.3.ECS/services/ECS/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3.ECS/services/ECS/package/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.3.ECS/services/ECS/package/scripts/params.py
index c304a93..652c23e 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3.ECS/services/ECS/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3.ECS/services/ECS/package/scripts/params.py
@@ -49,7 +49,7 @@ smoke_hdfs_user_mode = 0770
 java64_home = config['hostLevelParams']['java_home']
 java_version = int(config['hostLevelParams']['java_version'])
 
-hadoop_conf_dir = conf_select.get_hadoop_conf_dir(force_latest_on_upgrade=True)
+hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
 security_enabled = config['configurations']['cluster-env']['security_enabled']
 hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
 hadoop_dir = "/etc/hadoop"

http://git-wip-us.apache.org/repos/asf/ambari/blob/5433e479/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_client.py b/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_client.py
index 135b239..1cde55a 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_client.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_client.py
@@ -223,7 +223,6 @@ class TestHBaseClient(RMFTestCase):
     self.assertResourceCalled("Execute", ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'hbase-client', '2.2.1.0-2067'), sudo=True)
     self.assertResourceCalled('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'phoenix-client', '2.2.1.0-2067'), sudo=True)
     self.assertResourceCalled("Execute", ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'hadoop-client', '2.2.1.0-2067'), sudo=True)
-    self.assertEquals(1, mocks_dict['call'].call_count)
 
 
   @patch("resource_management.core.shell.call")

http://git-wip-us.apache.org/repos/asf/ambari/blob/5433e479/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_master.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_master.py b/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_master.py
index 4ade11a..370b776 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_master.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_master.py
@@ -678,7 +678,7 @@ class TestHBaseMaster(RMFTestCase):
         user = 'hdfs',
         dfs_type = '',
         owner = 'hbase',
-        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+        hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'directory',
         action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
     )
@@ -694,7 +694,7 @@ class TestHBaseMaster(RMFTestCase):
         user = 'hdfs',
         dfs_type = '',
         owner = 'hbase',
-        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+        hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'directory',
         action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
         mode = 0711,
@@ -711,7 +711,7 @@ class TestHBaseMaster(RMFTestCase):
         user = 'hdfs',
         dfs_type = '',
         action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
-        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+        hadoop_conf_dir = '/etc/hadoop/conf',
     )
 
     self.assertResourceCalled('Execute', '/usr/hdp/current/hbase-master/bin/hbase-daemon.sh --config /usr/hdp/current/hbase-master/conf start master',

http://git-wip-us.apache.org/repos/asf/ambari/blob/5433e479/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_phoenix_queryserver.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_phoenix_queryserver.py b/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_phoenix_queryserver.py
index 972aa61..f27a3b9 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_phoenix_queryserver.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_phoenix_queryserver.py
@@ -34,6 +34,7 @@ class TestPhoenixQueryServer(RMFTestCase):
 
   CONFIG_OVERRIDES = {"serviceName":"HBASE", "role":"PHOENIX_QUERY_SERVER"}
 
+  @patch("resource_management.core.sudo.path_isdir", new = MagicMock(return_value = True))
   def test_configure_default(self):
     self.executeScript(
       self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/phoenix_queryserver.py",
@@ -48,6 +49,7 @@ class TestPhoenixQueryServer(RMFTestCase):
     self.assert_configure_default()
     self.assertNoMoreResources()
 
+  @patch("resource_management.core.sudo.path_isdir", new = MagicMock(return_value = True))
   def test_start_default(self):
     self.executeScript(
       self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/phoenix_queryserver.py",
@@ -67,6 +69,7 @@ class TestPhoenixQueryServer(RMFTestCase):
     )
     self.assertNoMoreResources()
 
+  @patch("resource_management.core.sudo.path_isdir", new = MagicMock(return_value = True))
   def test_stop_default(self):
     self.executeScript(
       self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/phoenix_queryserver.py",
@@ -90,6 +93,7 @@ class TestPhoenixQueryServer(RMFTestCase):
     )
     self.assertNoMoreResources()
 
+  @patch("resource_management.core.sudo.path_isdir", new = MagicMock(return_value = True))
   def test_configure_secured(self):
     self.executeScript(
       self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/phoenix_queryserver.py",
@@ -104,6 +108,7 @@ class TestPhoenixQueryServer(RMFTestCase):
     self.assert_configure_secured()
     self.assertNoMoreResources()
 
+  @patch("resource_management.core.sudo.path_isdir", new = MagicMock(return_value = True))
   def test_start_secured(self):
     self.executeScript(
       self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/phoenix_queryserver.py",
@@ -123,6 +128,7 @@ class TestPhoenixQueryServer(RMFTestCase):
     )
     self.assertNoMoreResources()
 
+  @patch("resource_management.core.sudo.path_isdir", new = MagicMock(return_value = True))
   def test_stop_secured(self):
     self.executeScript(
       self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/phoenix_queryserver.py",
@@ -422,6 +428,7 @@ class TestPhoenixQueryServer(RMFTestCase):
       content = InlineTemplate('log4jproperties\nline2')
     )
 
+  @patch("resource_management.core.sudo.path_isdir", new = MagicMock(return_value = True))
   def test_upgrade_restart(self):
     config_file = self.get_src_folder()+"/test/python/stacks/2.3/configs/hbase_default.json"
     with open(config_file, "r") as f:
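
Each Phoenix Query Server test now pins resource_management.core.sudo.path_isdir to return True, presumably so directory-existence probes succeed on any build host. Because the decorator supplies new=, unittest's patch does not inject a mock argument, which is why none of the test signatures had to change. A standalone illustration of that behavior:

import os.path
from unittest.mock import MagicMock, patch

@patch("os.path.isdir", new=MagicMock(return_value=True))
def probe():
  # No extra mock parameter: "new" was given, so patch passes nothing in.
  return os.path.isdir("/definitely/not/there")

print(probe())  # True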

http://git-wip-us.apache.org/repos/asf/ambari/blob/5433e479/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py
index 24b0347..b1a4154 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py
@@ -25,9 +25,11 @@ from resource_management.core import shell
 import itertools
 from resource_management.core.exceptions import Fail
 import resource_management.libraries.functions.mounted_dirs_helper
+from resource_management.libraries.functions import conf_select
 
 @patch.object(resource_management.libraries.functions, 'check_process_status', new = MagicMock())
 @patch.object(Script, 'format_package_name', new = MagicMock())
+@patch.object(conf_select, "get_hadoop_conf_dir", new=MagicMock(return_value="/usr/hdp/current/hadoop-client/conf"))
 class TestDatanode(RMFTestCase):
   COMMON_SERVICES_PACKAGE_DIR = "HDFS/2.1.0.2.0/package"
   STACK_VERSION = "2.0.6"
@@ -73,7 +75,7 @@ class TestDatanode(RMFTestCase):
         action = ['delete'],
         not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid",
     )
-    self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ;  /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start datanode'",
+    self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ;  /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /usr/hdp/current/hadoop-client/conf start datanode'",
         environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
         not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid",
     )
@@ -96,7 +98,7 @@ class TestDatanode(RMFTestCase):
                        checked_call_mocks = side_effect,
                        target = RMFTestCase.TARGET_COMMON_SERVICES
     )
-    self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ;  /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf stop datanode'",
+    self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ;  /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /usr/hdp/current/hadoop-client/conf stop datanode'",
         environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
         only_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid")
 
@@ -143,7 +145,7 @@ class TestDatanode(RMFTestCase):
         action = ['delete'],
         not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid",
     )
-    self.assertResourceCalled('Execute', 'ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start datanode',
+    self.assertResourceCalled('Execute', 'ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /usr/hdp/current/hadoop-client/conf start datanode',
         environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
         not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid",
     )
@@ -163,7 +165,7 @@ class TestDatanode(RMFTestCase):
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES
     )
-    self.assert_configure_secured("2.2", snappy_enabled=False)
+    self.assert_configure_secured("2.3", snappy_enabled=False)
     self.assertResourceCalled('Directory', '/var/run/hadoop',
                               owner = 'hdfs',
                               group = 'hadoop',
@@ -183,8 +185,8 @@ class TestDatanode(RMFTestCase):
         action = ['delete'],
         not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid",
     )
-    self.assertResourceCalled('Execute', 'ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E /usr/hdp/current/hadoop-client/sbin/hadoop-daemon.sh --config /usr/hdp/current/hadoop-client/conf start datanode',
-        environment = {'HADOOP_LIBEXEC_DIR': '/usr/hdp/current/hadoop-client/libexec'},
+    self.assertResourceCalled('Execute', 'ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E /usr/hdp/2.1.0.0-1234/hadoop/sbin/hadoop-daemon.sh --config /usr/hdp/current/hadoop-client/conf start datanode',
+        environment = {'HADOOP_LIBEXEC_DIR': '/usr/hdp/2.1.0.0-1234/hadoop/libexec'},
         not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid",
     )
     self.assertNoMoreResources()
@@ -206,7 +208,7 @@ class TestDatanode(RMFTestCase):
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES
     )
-    self.assert_configure_secured("2.2", snappy_enabled=False)
+    self.assert_configure_secured("2.3", snappy_enabled=False)
     self.assertResourceCalled('Directory', '/var/run/hadoop',
                               owner = 'hdfs',
                               group = 'hadoop',
@@ -226,8 +228,8 @@ class TestDatanode(RMFTestCase):
         action = ['delete'],
         not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid",
     )
-    self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ;  /usr/hdp/current/hadoop-client/sbin/hadoop-daemon.sh --config /usr/hdp/current/hadoop-client/conf start datanode'",
-        environment = {'HADOOP_LIBEXEC_DIR': '/usr/hdp/current/hadoop-client/libexec'},
+    self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ;  /usr/hdp/2.1.0.0-1234/hadoop/sbin/hadoop-daemon.sh --config /usr/hdp/current/hadoop-client/conf start datanode'",
+        environment = {'HADOOP_LIBEXEC_DIR': '/usr/hdp/2.1.0.0-1234/hadoop/libexec'},
         not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid",
     )
     self.assertNoMoreResources()
@@ -249,7 +251,7 @@ class TestDatanode(RMFTestCase):
                        checked_call_mocks = side_effect,
                        target = RMFTestCase.TARGET_COMMON_SERVICES
     )
-    self.assertResourceCalled('Execute', 'ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf stop datanode',
+    self.assertResourceCalled('Execute', 'ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /usr/hdp/current/hadoop-client/conf stop datanode',
         environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
         only_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid")
 
@@ -279,8 +281,8 @@ class TestDatanode(RMFTestCase):
                        checked_call_mocks = side_effect,
                        target = RMFTestCase.TARGET_COMMON_SERVICES
     )
-    self.assertResourceCalled('Execute', 'ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E /usr/hdp/current/hadoop-client/sbin/hadoop-daemon.sh --config /usr/hdp/current/hadoop-client/conf stop datanode',
-        environment = {'HADOOP_LIBEXEC_DIR': '/usr/hdp/current/hadoop-client/libexec'},
+    self.assertResourceCalled('Execute', 'ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E /usr/hdp/2.1.0.0-1234/hadoop/sbin/hadoop-daemon.sh --config /usr/hdp/current/hadoop-client/conf stop datanode',
+        environment = {'HADOOP_LIBEXEC_DIR': '/usr/hdp/2.1.0.0-1234/hadoop/libexec'},
         only_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid")
 
     self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid', action = ['delete'])
@@ -312,8 +314,8 @@ class TestDatanode(RMFTestCase):
                        checked_call_mocks = side_effect,
                        target = RMFTestCase.TARGET_COMMON_SERVICES
     )
-    self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ;  /usr/hdp/current/hadoop-client/sbin/hadoop-daemon.sh --config /usr/hdp/current/hadoop-client/conf stop datanode'",
-        environment = {'HADOOP_LIBEXEC_DIR': '/usr/hdp/current/hadoop-client/libexec'},
+    self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ;  /usr/hdp/2.1.0.0-1234/hadoop/sbin/hadoop-daemon.sh --config /usr/hdp/current/hadoop-client/conf stop datanode'",
+        environment = {'HADOOP_LIBEXEC_DIR': '/usr/hdp/2.1.0.0-1234/hadoop/libexec'},
         only_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid")
 
     self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid', action = ['delete'])
@@ -346,19 +348,19 @@ class TestDatanode(RMFTestCase):
     self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
                               owner = 'hdfs',
                               group = 'hadoop',
-                              conf_dir = '/etc/hadoop/conf',
+                              conf_dir = '/usr/hdp/current/hadoop-client/conf',
                               configurations = self.getConfig()['configurations']['hdfs-site'],
                               configuration_attributes = self.getConfig()['configuration_attributes']['hdfs-site']
                               )
     self.assertResourceCalled('XmlConfig', 'core-site.xml',
                               owner = 'hdfs',
                               group = 'hadoop',
-                              conf_dir = '/etc/hadoop/conf',
+                              conf_dir = '/usr/hdp/current/hadoop-client/conf',
                               configurations = self.getConfig()['configurations']['core-site'],
                               configuration_attributes = self.getConfig()['configuration_attributes']['core-site'],
                               mode = 0644
                               )
-    self.assertResourceCalled('File', '/etc/hadoop/conf/slaves',
+    self.assertResourceCalled('File', '/usr/hdp/current/hadoop-client/conf/slaves',
                               content = Template('slaves.j2'),
                               owner = 'hdfs',
                               )
@@ -390,7 +392,7 @@ class TestDatanode(RMFTestCase):
                               )
 
   def assert_configure_secured(self, stackVersion=STACK_VERSION, snappy_enabled=True):
-    conf_dir = '/etc/hadoop/conf'
+    conf_dir = '/usr/hdp/current/hadoop-client/conf'
     if stackVersion != self.STACK_VERSION:
       conf_dir = '/usr/hdp/current/hadoop-client/conf'
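
TestDatanode takes the other patching route: a single @patch.object on the class pins conf_select.get_hadoop_conf_dir for every test method at once, and the expected paths in the assertions are updated to the pinned value. Class decoration with patch applies the same patch to each test_* method, as this self-contained example shows:

import os.path
import unittest
from unittest.mock import MagicMock, patch

@patch("os.path.isdir", new=MagicMock(return_value=True))
class ConfDirTests(unittest.TestCase):
  # The class-level patch wraps every test_* method below.
  def test_probe_one(self):
    self.assertTrue(os.path.isdir("/nope"))

  def test_probe_two(self):
    self.assertTrue(os.path.isdir("/also/nope"))

if __name__ == "__main__":
  unittest.main()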
     

http://git-wip-us.apache.org/repos/asf/ambari/blob/5433e479/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py
index ae51abf..805cd8b 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py
@@ -1357,6 +1357,7 @@ class TestNamenode(RMFTestCase):
   @patch("hdfs_namenode.is_this_namenode_active")
   @patch("resource_management.libraries.functions.setup_ranger_plugin_xml.setup_ranger_plugin")
   @patch("utils.get_namenode_states")
+  @patch("resource_management.core.sudo.path_isdir", new = MagicMock(return_value = True))
   def test_upgrade_restart_eu_with_ranger(self, get_namenode_states_mock, setup_ranger_plugin_mock, is_active_nn_mock):
     is_active_nn_mock.return_value = True
 
@@ -1613,6 +1614,7 @@ class TestNamenode(RMFTestCase):
     self.assertEquals("/usr/lib/hadoop/sbin", sys.modules["params"].hadoop_bin)
 
   @patch.object(shell, "call")
+  @patch("resource_management.core.sudo.path_isdir", new = MagicMock(return_value = True))
   def test_pre_upgrade_restart_22_params(self, call_mock):
     config_file = self.get_src_folder()+"/test/python/stacks/2.0.6/configs/nn_ru_lzo.json"
     with open(config_file, "r") as f:
@@ -1633,7 +1635,7 @@ class TestNamenode(RMFTestCase):
                        call_mocks = [(0, None), (0, None), (0, None), (0, None), (0, None), (0, None), (0, None)],
                        mocks_dict = mocks_dict)
     import sys
-    self.assertEquals("/usr/hdp/current/hadoop-client/conf", sys.modules["params"].hadoop_conf_dir)
+    self.assertEquals("/etc/hadoop/conf", sys.modules["params"].hadoop_conf_dir)
     self.assertEquals("/usr/hdp/{0}/hadoop/libexec".format(version), sys.modules["params"].hadoop_libexec_dir)
     self.assertEquals("/usr/hdp/{0}/hadoop/bin".format(version), sys.modules["params"].hadoop_bin_dir)
     self.assertEquals("/usr/hdp/{0}/hadoop/sbin".format(version), sys.modules["params"].hadoop_bin)
@@ -1669,6 +1671,7 @@ class TestNamenode(RMFTestCase):
 
 
   @patch("namenode_upgrade.create_upgrade_marker", MagicMock())
+  @patch("resource_management.core.sudo.path_isdir", new = MagicMock(return_value = True))
   def test_express_upgrade_skips_safemode_and_directory_creation(self):
     """
     Tests that we wait for Safemode to be OFF no matter what except for EU. And, because of that,

http://git-wip-us.apache.org/repos/asf/ambari/blob/5433e479/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_metastore.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_metastore.py b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_metastore.py
index 0ce6282..452c0b6 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_metastore.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_metastore.py
@@ -22,7 +22,19 @@ import os
 from mock.mock import MagicMock, call, patch
 from stacks.utils.RMFTestCase import *
 
+from resource_management.libraries.functions import stack_features
+
+# used for faking out stack features when the config files used by unit tests use older stacks
+def mock_stack_feature(stack_feature, stack_version):
+  if stack_feature == "rolling_upgrade":
+    return True
+  if stack_feature == "config_versioning":
+    return True
+
+  return False
+
 @patch("resource_management.libraries.functions.get_user_call_output.get_user_call_output", new=MagicMock(return_value=(0,'123','')))
+@patch.object(stack_features, "check_stack_feature", new=MagicMock(side_effect=mock_stack_feature))
 class TestHiveMetastore(RMFTestCase):
   COMMON_SERVICES_PACKAGE_DIR = "HIVE/0.12.0.2.0/package"
   STACK_VERSION = "2.0.6"
@@ -49,13 +61,13 @@ class TestHiveMetastore(RMFTestCase):
 
     self.assert_configure_default()
     self.assert_init_schema()
-    self.assertResourceCalled('Execute', '/tmp/start_metastore_script /var/log/hive/hive.out /var/log/hive/hive.err /var/run/hive/hive.pid /etc/hive/conf.server /var/log/hive',
+    self.assertResourceCalled('Execute', '/tmp/start_metastore_script /var/log/hive/hive.out /var/log/hive/hive.err /var/run/hive/hive.pid /usr/hdp/current/hive-server2/conf/conf.server /var/log/hive',
         environment = {'HADOOP_HOME': '/usr/hdp/current/hadoop-client',
            'HIVE_BIN': 'hive',
            'JAVA_HOME': u'/usr/jdk64/jdk1.7.0_45'},
         not_if = "ls /var/run/hive/hive.pid >/dev/null 2>&1 && ps -p 123 >/dev/null 2>&1",
         user = 'hive',
-        path = ['/bin:/usr/hdp/current/hive-server2/bin:/usr/bin'],
+        path = ['/bin:/usr/hdp/current/hive-server2/bin:/usr/hdp/current/hadoop-client/bin'],
     )
     self.assertResourceCalled('Execute', '/usr/jdk64/jdk1.7.0_45/bin/java -cp /usr/lib/ambari-agent/DBConnectionVerification.jar:/usr/hdp/current/hive-server2/lib/mysql-connector-java.jar org.apache.ambari.server.DBConnectionVerification \'jdbc:mysql://c6402.ambari.apache.org/hive?createDatabaseIfNotExist=true\' hive \'!`"\'"\'"\' 1\' com.mysql.jdbc.Driver',
         path = ['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'],
@@ -110,13 +122,13 @@ class TestHiveMetastore(RMFTestCase):
     )
     self.assert_configure_secured()
     self.assert_init_schema()
-    self.assertResourceCalled('Execute', '/tmp/start_metastore_script /var/log/hive/hive.out /var/log/hive/hive.err /var/run/hive/hive.pid /etc/hive/conf.server /var/log/hive',
-        environment = {'HADOOP_HOME': '/usr/hdp/current/hadoop-client',
+    self.assertResourceCalled('Execute', '/tmp/start_metastore_script /var/log/hive/hive.out /var/log/hive/hive.err /var/run/hive/hive.pid /usr/hdp/current/hive-server2/conf/conf.server /var/log/hive',
+        environment = {'HADOOP_HOME': '/usr/hdp/2.1.0.0-1234/hadoop',
            'HIVE_BIN': 'hive',
            'JAVA_HOME': u'/usr/jdk64/jdk1.7.0_45'},
         not_if = "ls /var/run/hive/hive.pid >/dev/null 2>&1 && ps -p 123 >/dev/null 2>&1",
         user = 'hive',
-        path = ['/bin:/usr/hdp/current/hive-server2/bin:/usr/bin'],
+        path = ['/bin:/usr/hdp/current/hive-server2/bin:/usr/hdp/2.1.0.0-1234/hadoop/bin'],
     )
     self.assertResourceCalled('Execute', '/usr/jdk64/jdk1.7.0_45/bin/java -cp /usr/lib/ambari-agent/DBConnectionVerification.jar:/usr/hdp/current/hive-server2/lib/mysql-connector-java.jar org.apache.ambari.server.DBConnectionVerification \'jdbc:mysql://c6402.ambari.apache.org/hive?createDatabaseIfNotExist=true\' hive \'!`"\'"\'"\' 1\' com.mysql.jdbc.Driver',
         path = ['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'],
@@ -194,7 +206,7 @@ class TestHiveMetastore(RMFTestCase):
                               )
     self.assertResourceCalled('XmlConfig', 'hive-site.xml',
                               group = 'hadoop',
-                              conf_dir = '/etc/hive/conf.server',
+                              conf_dir = '/usr/hdp/current/hive-server2/conf/conf.server',
                               mode = 0600,
                               configuration_attributes = {u'final': {u'hive.optimize.bucketmapjoin.sortedmerge': u'true',
                                                                      u'javax.jdo.option.ConnectionDriverName': u'true',
@@ -202,7 +214,7 @@ class TestHiveMetastore(RMFTestCase):
                               owner = 'hive',
                               configurations = self.getConfig()['configurations']['hive-site'],
                               )
-    self.assertResourceCalled('File', '/etc/hive/conf.server/hive-env.sh',
+    self.assertResourceCalled('File', '/usr/hdp/current/hive-server2/conf/conf.server/hive-env.sh',
                               content = InlineTemplate(self.getConfig()['configurations']['hive-env']['content']),
                               owner = 'hive',
                               group = 'hadoop',
@@ -233,7 +245,7 @@ class TestHiveMetastore(RMFTestCase):
         content = DownloadSource('http://c6401.ambari.apache.org:8080/resources/DBConnectionVerification.jar'),
         mode = 0644,
     )
-    self.assertResourceCalled('File', '/etc/hive/conf.server/hadoop-metrics2-hivemetastore.properties',
+    self.assertResourceCalled('File', '/usr/hdp/current/hive-server2/conf/conf.server/hadoop-metrics2-hivemetastore.properties',
                               owner = 'hive',
                               group = 'hadoop',
                               content = Template('hadoop-metrics2-hivemetastore.properties.j2'),
@@ -308,7 +320,7 @@ class TestHiveMetastore(RMFTestCase):
                               )
     self.assertResourceCalled('XmlConfig', 'hive-site.xml',
                               group = 'hadoop',
-                              conf_dir = '/etc/hive/conf.server',
+                              conf_dir = '/usr/hdp/current/hive-server2/conf/conf.server',
                               mode = 0600,
                               configuration_attributes = {u'final': {u'hive.optimize.bucketmapjoin.sortedmerge': u'true',
                                                                      u'javax.jdo.option.ConnectionDriverName': u'true',
@@ -316,7 +328,7 @@ class TestHiveMetastore(RMFTestCase):
                               owner = 'hive',
                               configurations = self.getConfig()['configurations']['hive-site'],
                               )
-    self.assertResourceCalled('File', '/etc/hive/conf.server/hive-env.sh',
+    self.assertResourceCalled('File', '/usr/hdp/current/hive-server2/conf/conf.server/hive-env.sh',
                               content = InlineTemplate(self.getConfig()['configurations']['hive-env']['content']),
                               owner = 'hive',
                               group = 'hadoop',
@@ -333,7 +345,7 @@ class TestHiveMetastore(RMFTestCase):
                               group = 'root',
                               mode = 0644,
                               )
-    self.assertResourceCalled('File', '/etc/hive/conf.server/zkmigrator_jaas.conf',
+    self.assertResourceCalled('File', '/usr/hdp/current/hive-server2/conf/conf.server/zkmigrator_jaas.conf',
                               content = Template('zkmigrator_jaas.conf.j2'),
                               owner = 'hive',
                               group = 'hadoop',
@@ -352,7 +364,7 @@ class TestHiveMetastore(RMFTestCase):
         content = DownloadSource('http://c6401.ambari.apache.org:8080/resources/DBConnectionVerification.jar'),
         mode = 0644,
     )
-    self.assertResourceCalled('File', '/etc/hive/conf.server/hadoop-metrics2-hivemetastore.properties',
+    self.assertResourceCalled('File', '/usr/hdp/current/hive-server2/conf/conf.server/hadoop-metrics2-hivemetastore.properties',
                               owner = 'hive',
                               group = 'hadoop',
                               content = Template('hadoop-metrics2-hivemetastore.properties.j2'),
@@ -385,8 +397,8 @@ class TestHiveMetastore(RMFTestCase):
                               )
 
   def assert_init_schema(self):
-    self.assertResourceCalled('Execute', 'export HIVE_CONF_DIR=/etc/hive/conf.server ; /usr/hdp/current/hive-server2/bin/schematool -initSchema -dbType mysql -userName hive -passWord \'!`"\'"\'"\' 1\' -verbose',
-        not_if = 'ambari-sudo.sh su hive -l -s /bin/bash -c \'[RMF_EXPORT_PLACEHOLDER]export HIVE_CONF_DIR=/etc/hive/conf.server ; /usr/hdp/current/hive-server2/bin/schematool -info -dbType mysql -userName hive -passWord \'"\'"\'!`"\'"\'"\'"\'"\'"\'"\'"\'"\' 1\'"\'"\' -verbose\'',
+    self.assertResourceCalled('Execute', 'export HIVE_CONF_DIR=/usr/hdp/current/hive-server2/conf/conf.server ; /usr/hdp/current/hive-server2/bin/schematool -initSchema -dbType mysql -userName hive -passWord \'!`"\'"\'"\' 1\' -verbose',
+        not_if = 'ambari-sudo.sh su hive -l -s /bin/bash -c \'[RMF_EXPORT_PLACEHOLDER]export HIVE_CONF_DIR=/usr/hdp/current/hive-server2/conf/conf.server ; /usr/hdp/current/hive-server2/bin/schematool -info -dbType mysql -userName hive -passWord \'"\'"\'!`"\'"\'"\'"\'"\'"\'"\'"\'"\' 1\'"\'"\' -verbose\'',
         user = 'hive',
     )
 
@@ -540,7 +552,7 @@ class TestHiveMetastore(RMFTestCase):
         environment = {'HADOOP_HOME': '/usr/hdp/2.3.0.0-1234/hadoop', 'JAVA_HOME': u'/usr/jdk64/jdk1.7.0_45', 'HIVE_BIN': '/usr/hdp/current/hive-server2/bin/hive'},
         not_if = None,
         user = 'hive',
-        path = ['/bin:/usr/hdp/current/hive-server2/bin:/usr/hdp/current/hadoop-client/bin'])
+        path = ['/bin:/usr/hdp/current/hive-server2/bin:/usr/hdp/2.3.0.0-1234/hadoop/bin'])
 
     self.assertResourceCalled('Execute', '/usr/jdk64/jdk1.7.0_45/bin/java -cp /usr/lib/ambari-agent/DBConnectionVerification.jar:/usr/hdp/current/hive-server2/lib/mysql-connector-java.jar org.apache.ambari.server.DBConnectionVerification \'jdbc:mysql://c6402.ambari.apache.org/hive?createDatabaseIfNotExist=true\' hive aaa com.mysql.jdbc.Driver',
         path = ['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'],
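
TestHiveMetastore is pinned at the class level too, but with a side_effect function rather than a fixed return value: only rolling_upgrade and config_versioning report as supported, no matter which older stack version the test's JSON config carries. A minimal standalone demo of side_effect-driven stubbing:

from unittest.mock import MagicMock

def fake_stack_feature(stack_feature, stack_version):
  # Mirrors mock_stack_feature above: everything except these two
  # features is reported as unsupported.
  return stack_feature in ("rolling_upgrade", "config_versioning")

check = MagicMock(side_effect=fake_stack_feature)
print(check("config_versioning", "2.0.6"))        # True
print(check("hive_server_interactive", "2.0.6"))  # False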


[2/3] ambari git commit: AMBARI-22083 - Wrong Hadoop Home Directory Is Being Picked Up on MAINT/PATCH Upgraded Clusters (jonathanhurley)

Posted by jo...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/5433e479/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
index 30e76ef..fd92277 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
@@ -30,10 +30,26 @@ from resource_management.libraries.script.script import Script
 from resource_management.libraries.functions import copy_tarball
 from resource_management.libraries import functions
 from resource_management.core.logger import Logger
+from resource_management.libraries.functions.constants import StackFeature
+from resource_management.libraries.functions import stack_features
+from resource_management.libraries.functions import stack_select
+from resource_management.libraries.functions import conf_select
+
+# used for faking out stack features when the config files used by unit tests use older stacks
+def mock_stack_feature(stack_feature, stack_version):
+  if stack_feature == StackFeature.ROLLING_UPGRADE:
+    return True
+  if stack_feature == StackFeature.CONFIG_VERSIONING:
+    return True
+
+  return False
 
 @patch.object(functions, "get_stack_version", new = MagicMock(return_value="2.0.0.0-1234"))
 @patch("resource_management.libraries.functions.check_thrift_port_sasl", new=MagicMock())
 @patch("resource_management.libraries.functions.get_user_call_output.get_user_call_output", new=MagicMock(return_value=(0,'123','')))
+@patch.object(stack_select, "get_hadoop_dir", new=MagicMock(return_value="mock_hadoop_dir"))
+@patch.object(conf_select, "get_hadoop_conf_dir", new=MagicMock(return_value="/usr/hdp/current/hadoop-client/conf"))
+@patch.object(stack_features, "check_stack_feature", new=MagicMock(side_effect=mock_stack_feature))
 class TestHiveServer(RMFTestCase):
   COMMON_SERVICES_PACKAGE_DIR = "HIVE/0.12.0.2.0/package"
   STACK_VERSION = "2.0.6"
@@ -72,17 +88,17 @@ class TestHiveServer(RMFTestCase):
     self.assert_configure_default()
 
     self.assertResourceCalled('Execute',
-                              'hive --config /etc/hive/conf.server --service metatool -updateLocation hdfs://c6401.ambari.apache.org:8020 OK.',
-                              environment={'PATH': '/bin:/usr/hdp/current/hive-server2/bin:/usr/bin'},
+                              'hive --config /usr/hdp/current/hive-server2/conf/conf.server --service metatool -updateLocation hdfs://c6401.ambari.apache.org:8020 OK.',
+                              environment={'PATH': '/bin:/usr/hdp/current/hive-server2/bin:mock_hadoop_dir'},
                               user='hive'
     )
-    self.assertResourceCalled('Execute', '/tmp/start_hiveserver2_script /var/log/hive/hive-server2.out /var/log/hive/hive-server2.err /var/run/hive/hive-server.pid /etc/hive/conf.server /var/log/hive',
-        environment = {'HADOOP_HOME': '/usr/hdp/current/hadoop-client',
+    self.assertResourceCalled('Execute', '/tmp/start_hiveserver2_script /var/log/hive/hive-server2.out /var/log/hive/hive-server2.err /var/run/hive/hive-server.pid /usr/hdp/current/hive-server2/conf/conf.server /var/log/hive',
+        environment = {'HADOOP_HOME': 'mock_hadoop_dir',
            'HIVE_BIN': 'hive',
            'JAVA_HOME': u'/usr/jdk64/jdk1.7.0_45'},
         not_if = "ls /var/run/hive/hive-server.pid >/dev/null 2>&1 && ps -p 123 >/dev/null 2>&1",
         user = 'hive',
-        path = ['/bin:/usr/hdp/current/hive-server2/bin:/usr/bin'],
+        path = ['/bin:/usr/hdp/current/hive-server2/bin:mock_hadoop_dir'],
     )
     self.assertResourceCalled('Execute',
                               '/usr/jdk64/jdk1.7.0_45/bin/java -cp /usr/lib/ambari-agent/DBConnectionVerification.jar:/usr/hdp/current/hive-server2/lib/mysql-connector-java.jar org.apache.ambari.server.DBConnectionVerification \'jdbc:mysql://c6402.ambari.apache.org/hive?createDatabaseIfNotExist=true\' hive \'!`"\'"\'"\' 1\' com.mysql.jdbc.Driver',
@@ -107,13 +123,13 @@ class TestHiveServer(RMFTestCase):
 
     self.assert_configure_default(default_fs_default='hcfs://c6401.ambari.apache.org:8020')
 
-    self.assertResourceCalled('Execute', '/tmp/start_hiveserver2_script /var/log/hive/hive-server2.out /var/log/hive/hive-server2.err /var/run/hive/hive-server.pid /etc/hive/conf.server /var/log/hive',
-                              environment = {'HADOOP_HOME': '/usr/hdp/current/hadoop-client',
+    self.assertResourceCalled('Execute', '/tmp/start_hiveserver2_script /var/log/hive/hive-server2.out /var/log/hive/hive-server2.err /var/run/hive/hive-server.pid /usr/hdp/current/hive-server2/conf/conf.server /var/log/hive',
+                              environment = {'HADOOP_HOME': 'mock_hadoop_dir',
                                              'HIVE_BIN': 'hive',
                                              'JAVA_HOME': u'/usr/jdk64/jdk1.7.0_45'},
                               not_if = "ls /var/run/hive/hive-server.pid >/dev/null 2>&1 && ps -p 123 >/dev/null 2>&1",
                               user = 'hive',
-                              path = ['/bin:/usr/hdp/current/hive-server2/bin:/usr/bin'],
+                              path = ['/bin:/usr/hdp/current/hive-server2/bin:mock_hadoop_dir'],
                               )
     self.assertResourceCalled('Execute',
                               '/usr/jdk64/jdk1.7.0_45/bin/java -cp /usr/lib/ambari-agent/DBConnectionVerification.jar:/usr/hdp/current/hive-server2/lib/mysql-connector-java.jar org.apache.ambari.server.DBConnectionVerification \'jdbc:mysql://c6402.ambari.apache.org/hive?createDatabaseIfNotExist=true\' hive \'!`"\'"\'"\' 1\' com.mysql.jdbc.Driver',
@@ -134,17 +150,17 @@ class TestHiveServer(RMFTestCase):
 
     self.assert_configure_default()
 
-    self.assertResourceCalled('Execute', 'hive --config /etc/hive/conf.server --service metatool -updateLocation hdfs://c6401.ambari.apache.org:8020 OK.',
-                              environment = {'PATH': '/bin:/usr/hdp/current/hive-server2/bin:/usr/bin'},
+    self.assertResourceCalled('Execute', 'hive --config /usr/hdp/current/hive-server2/conf/conf.server --service metatool -updateLocation hdfs://c6401.ambari.apache.org:8020 OK.',
+                              environment = {'PATH': '/bin:/usr/hdp/current/hive-server2/bin:mock_hadoop_dir'},
                               user = 'hive',
                               )
-    self.assertResourceCalled('Execute', '/tmp/start_hiveserver2_script /var/log/hive/hive-server2.out /var/log/hive/hive-server2.err /var/run/hive/hive-server.pid /etc/hive/conf.server /var/log/hive',
-        environment = {'HADOOP_HOME': '/usr/hdp/current/hadoop-client',
+    self.assertResourceCalled('Execute', '/tmp/start_hiveserver2_script /var/log/hive/hive-server2.out /var/log/hive/hive-server2.err /var/run/hive/hive-server.pid /usr/hdp/current/hive-server2/conf/conf.server /var/log/hive',
+        environment = {'HADOOP_HOME': 'mock_hadoop_dir',
            'HIVE_BIN': 'hive',
            'JAVA_HOME': u'/usr/jdk64/jdk1.7.0_45'},
         not_if = "ls /var/run/hive/hive-server.pid >/dev/null 2>&1 && ps -p 123 >/dev/null 2>&1",
         user = 'hive',
-        path = ['/bin:/usr/hdp/current/hive-server2/bin:/usr/bin'],
+        path = ['/bin:/usr/hdp/current/hive-server2/bin:mock_hadoop_dir'],
     )
 
     self.assertResourceCalled('Execute', '/usr/jdk64/jdk1.7.0_45/bin/java -cp /usr/lib/ambari-agent/DBConnectionVerification.jar:/usr/hdp/current/hive-server2/lib/mysql-connector-java.jar org.apache.ambari.server.DBConnectionVerification \'jdbc:mysql://c6402.ambari.apache.org/hive?createDatabaseIfNotExist=true\' hive \'!`"\'"\'"\' 1\' com.mysql.jdbc.Driver',
@@ -165,17 +181,17 @@ class TestHiveServer(RMFTestCase):
 
     self.assert_configure_default(no_tmp=True)
 
-    self.assertResourceCalled('Execute', 'hive --config /etc/hive/conf.server --service metatool -updateLocation hdfs://c6401.ambari.apache.org:8020 OK.',
-                              environment = {'PATH': '/bin:/usr/hdp/current/hive-server2/bin:/usr/bin'},
+    self.assertResourceCalled('Execute', 'hive --config /usr/hdp/current/hive-server2/conf/conf.server --service metatool -updateLocation hdfs://c6401.ambari.apache.org:8020 OK.',
+                              environment = {'PATH': '/bin:/usr/hdp/current/hive-server2/bin:mock_hadoop_dir'},
                               user = 'hive',
                               )
-    self.assertResourceCalled('Execute', '/tmp/start_hiveserver2_script /var/log/hive/hive-server2.out /var/log/hive/hive-server2.err /var/run/hive/hive-server.pid /etc/hive/conf.server /var/log/hive',
-        environment = {'HADOOP_HOME': '/usr/hdp/current/hadoop-client',
+    self.assertResourceCalled('Execute', '/tmp/start_hiveserver2_script /var/log/hive/hive-server2.out /var/log/hive/hive-server2.err /var/run/hive/hive-server.pid /usr/hdp/current/hive-server2/conf/conf.server /var/log/hive',
+        environment = {'HADOOP_HOME': 'mock_hadoop_dir',
            'HIVE_BIN': 'hive',
            'JAVA_HOME': u'/usr/jdk64/jdk1.7.0_45'},
         not_if = "ls /var/run/hive/hive-server.pid >/dev/null 2>&1 && ps -p 123 >/dev/null 2>&1",
         user = 'hive',
-        path = ['/bin:/usr/hdp/current/hive-server2/bin:/usr/bin'],
+        path = ['/bin:/usr/hdp/current/hive-server2/bin:mock_hadoop_dir'],
     )
 
     self.assertResourceCalled('Execute', '/usr/jdk64/jdk1.7.0_45/bin/java -cp /usr/lib/ambari-agent/DBConnectionVerification.jar:/usr/hdp/current/hive-server2/lib/mysql-connector-java.jar org.apache.ambari.server.DBConnectionVerification \'jdbc:mysql://c6402.ambari.apache.org/hive?createDatabaseIfNotExist=true\' hive \'!`"\'"\'"\' 1\' com.mysql.jdbc.Driver',
@@ -196,17 +212,17 @@ class TestHiveServer(RMFTestCase):
 
     self.assert_configure_default(no_tmp=True)
 
-    self.assertResourceCalled('Execute', 'hive --config /etc/hive/conf.server --service metatool -updateLocation hdfs://c6401.ambari.apache.org:8020 OK.',
-                              environment = {'PATH': '/bin:/usr/hdp/current/hive-server2/bin:/usr/bin'},
+    self.assertResourceCalled('Execute', 'hive --config /usr/hdp/current/hive-server2/conf/conf.server --service metatool -updateLocation hdfs://c6401.ambari.apache.org:8020 OK.',
+                              environment = {'PATH': '/bin:/usr/hdp/current/hive-server2/bin:mock_hadoop_dir'},
                               user = 'hive',
                               )
-    self.assertResourceCalled('Execute', '/tmp/start_hiveserver2_script /var/log/hive/hive-server2.out /var/log/hive/hive-server2.err /var/run/hive/hive-server.pid /etc/hive/conf.server /var/log/hive',
-        environment = {'HADOOP_HOME': '/usr/hdp/current/hadoop-client',
+    self.assertResourceCalled('Execute', '/tmp/start_hiveserver2_script /var/log/hive/hive-server2.out /var/log/hive/hive-server2.err /var/run/hive/hive-server.pid /usr/hdp/current/hive-server2/conf/conf.server /var/log/hive',
+        environment = {'HADOOP_HOME': 'mock_hadoop_dir',
            'HIVE_BIN': 'hive',
            'JAVA_HOME': u'/usr/jdk64/jdk1.7.0_45'},
         not_if = "ls /var/run/hive/hive-server.pid >/dev/null 2>&1 && ps -p 123 >/dev/null 2>&1",
         user = 'hive',
-        path = ['/bin:/usr/hdp/current/hive-server2/bin:/usr/bin'],
+        path = ['/bin:/usr/hdp/current/hive-server2/bin:mock_hadoop_dir'],
     )
 
     self.assertResourceCalled('Execute', '/usr/jdk64/jdk1.7.0_45/bin/java -cp /usr/lib/ambari-agent/DBConnectionVerification.jar:/usr/hdp/current/hive-server2/lib/mysql-connector-java.jar org.apache.ambari.server.DBConnectionVerification \'jdbc:mysql://c6402.ambari.apache.org/hive?createDatabaseIfNotExist=true\' hive \'!`"\'"\'"\' 1\' com.mysql.jdbc.Driver',
@@ -237,7 +253,7 @@ class TestHiveServer(RMFTestCase):
     self.assertResourceCalled('File', '/var/run/hive/hive-server.pid',
       action = ['delete'],
     )
-    
+
     self.assertNoMoreResources()
 
   def test_configure_secured(self):
@@ -273,13 +289,13 @@ class TestHiveServer(RMFTestCase):
     )
 
     self.assert_configure_secured()
-    self.assertResourceCalled('Execute', '/tmp/start_hiveserver2_script /var/log/hive/hive-server2.out /var/log/hive/hive-server2.err /var/run/hive/hive-server.pid /etc/hive/conf.server /var/log/hive',
-        environment = {'HADOOP_HOME': '/usr/hdp/current/hadoop-client',
+    self.assertResourceCalled('Execute', '/tmp/start_hiveserver2_script /var/log/hive/hive-server2.out /var/log/hive/hive-server2.err /var/run/hive/hive-server.pid /usr/hdp/current/hive-server2/conf/conf.server /var/log/hive',
+        environment = {'HADOOP_HOME': 'mock_hadoop_dir',
            'HIVE_BIN': 'hive',
            'JAVA_HOME': u'/usr/jdk64/jdk1.7.0_45'},
         not_if = "ls /var/run/hive/hive-server.pid >/dev/null 2>&1 && ps -p 123 >/dev/null 2>&1",
         user = 'hive',
-        path = ['/bin:/usr/hdp/current/hive-server2/bin:/usr/bin'],
+        path = ['/bin:/usr/hdp/current/hive-server2/bin:mock_hadoop_dir'],
     )
     self.assertResourceCalled('Execute',
                               '/usr/jdk64/jdk1.7.0_45/bin/java -cp /usr/lib/ambari-agent/DBConnectionVerification.jar:/usr/hdp/current/hive-server2/lib/mysql-connector-java.jar org.apache.ambari.server.DBConnectionVerification \'jdbc:mysql://c6402.ambari.apache.org/hive?createDatabaseIfNotExist=true\' hive \'!`"\'"\'"\' 1\' com.mysql.jdbc.Driver',
@@ -316,7 +332,7 @@ class TestHiveServer(RMFTestCase):
     self.assertResourceCalled('File', '/var/run/hive/hive-server.pid',
       action = ['delete'],
     )
-    
+
     self.assertNoMoreResources()
 
   def assert_configure_default(self, no_tmp = False, default_fs_default='hdfs://c6401.ambari.apache.org:8020'):
@@ -324,13 +340,13 @@ class TestHiveServer(RMFTestCase):
     self.assertResourceCalled('HdfsResource', '/apps/webhcat',
         immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
         security_enabled = False,
-        hadoop_bin_dir = '/usr/bin',
+        hadoop_bin_dir = 'mock_hadoop_dir',
         keytab = UnknownConfigurationMock(),
         kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
         dfs_type = '',
         owner = 'hcat',
-        hadoop_conf_dir = '/etc/hadoop/conf',
+        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
         type = 'directory',
         action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='missing_principal', default_fs=default_fs_default,
         mode = 0755,
@@ -338,13 +354,13 @@ class TestHiveServer(RMFTestCase):
     self.assertResourceCalled('HdfsResource', '/user/hcat',
         immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
         security_enabled = False,
-        hadoop_bin_dir = '/usr/bin',
+        hadoop_bin_dir = 'mock_hadoop_dir',
         keytab = UnknownConfigurationMock(),
         kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
         dfs_type = '',
         owner = 'hcat',
-        hadoop_conf_dir = '/etc/hadoop/conf',
+        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
         type = 'directory',
         action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='missing_principal', default_fs=default_fs_default,
         mode = 0755,
@@ -357,13 +373,13 @@ class TestHiveServer(RMFTestCase):
     self.assertResourceCalled('HdfsResource', '/apps/hive/warehouse',
         immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
         security_enabled = False,
-        hadoop_bin_dir = '/usr/bin',
+        hadoop_bin_dir = 'mock_hadoop_dir',
         keytab = UnknownConfigurationMock(),
         kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
         dfs_type = '',
         owner = 'hive',
-        hadoop_conf_dir = '/etc/hadoop/conf',
+        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
         type = 'directory',
         action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='missing_principal', default_fs=default_fs_default,
         group = 'hadoop',
@@ -372,13 +388,13 @@ class TestHiveServer(RMFTestCase):
     self.assertResourceCalled('HdfsResource', '/user/hive',
         immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
         security_enabled = False,
-        hadoop_bin_dir = '/usr/bin',
+        hadoop_bin_dir = 'mock_hadoop_dir',
         keytab = UnknownConfigurationMock(),
         kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
         dfs_type = '',
         owner = 'hive',
-        hadoop_conf_dir = '/etc/hadoop/conf',
+        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
         type = 'directory',
         action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='missing_principal', default_fs=default_fs_default,
         mode = 0755,
@@ -387,14 +403,14 @@ class TestHiveServer(RMFTestCase):
       self.assertResourceCalled('HdfsResource', '/custompath/tmp/hive',
           immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
           security_enabled = False,
-          hadoop_conf_dir = '/etc/hadoop/conf',
+          hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
           keytab = UnknownConfigurationMock(),
           kinit_path_local = '/usr/bin/kinit',
           user = 'hdfs',
           dfs_type = '',
           owner = 'hive',
           group = 'hdfs',
-          hadoop_bin_dir = '/usr/bin',
+          hadoop_bin_dir = 'mock_hadoop_dir',
           type = 'directory',
           action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='missing_principal', default_fs=default_fs_default,
           mode = 0777,
@@ -402,13 +418,13 @@ class TestHiveServer(RMFTestCase):
     self.assertResourceCalled('HdfsResource', None,
         immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
         security_enabled = False,
-        hadoop_bin_dir = '/usr/bin',
+        hadoop_bin_dir = 'mock_hadoop_dir',
         keytab = UnknownConfigurationMock(),
         kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
         dfs_type = '',
         action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='missing_principal', default_fs=default_fs_default,
-        hadoop_conf_dir = '/etc/hadoop/conf',
+        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
     )
     self.assertResourceCalled('Directory', '/etc/hive',
                               mode=0755,
@@ -453,7 +469,7 @@ class TestHiveServer(RMFTestCase):
     )
     self.assertResourceCalled('XmlConfig', 'hive-site.xml',
                               group='hadoop',
-                              conf_dir='/etc/hive/conf.server',
+                              conf_dir='/usr/hdp/current/hive-server2/conf/conf.server',
                               mode=0600,
                               configuration_attributes={u'final': {u'hive.optimize.bucketmapjoin.sortedmerge': u'true',
                                                                    u'javax.jdo.option.ConnectionDriverName': u'true',
@@ -463,13 +479,13 @@ class TestHiveServer(RMFTestCase):
     )
     self.assertResourceCalled('XmlConfig', 'hiveserver2-site.xml',
       group = 'hadoop',
-      conf_dir = '/etc/hive/conf.server',
+      conf_dir = '/usr/hdp/current/hive-server2/conf/conf.server',
       mode = 0600,
       owner = 'hive',
       configuration_attributes = self.getConfig()['configuration_attributes']['hiveserver2-site'],
       configurations = self.getConfig()['configurations']['hiveserver2-site'],
     )
-    self.assertResourceCalled('File', '/etc/hive/conf.server/hive-env.sh',
+    self.assertResourceCalled('File', '/usr/hdp/current/hive-server2/conf/conf.server/hive-env.sh',
                               content=InlineTemplate(self.getConfig()['configurations']['hive-env']['content']),
                               owner='hive',
                               group='hadoop',
@@ -505,7 +521,7 @@ class TestHiveServer(RMFTestCase):
                               content=Template('startHiveserver2.sh.j2'),
                               mode=0755,
     )
-    self.assertResourceCalled('File', '/etc/hive/conf.server/hadoop-metrics2-hiveserver2.properties',
+    self.assertResourceCalled('File', '/usr/hdp/current/hive-server2/conf/conf.server/hadoop-metrics2-hiveserver2.properties',
                               owner = 'hive',
                               group = 'hadoop',
                               content = Template('hadoop-metrics2-hiveserver2.properties.j2'),
@@ -537,13 +553,13 @@ class TestHiveServer(RMFTestCase):
     self.assertResourceCalled('HdfsResource', '/apps/webhcat',
         immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
         security_enabled = True,
-        hadoop_bin_dir = '/usr/bin',
+        hadoop_bin_dir = 'mock_hadoop_dir',
         keytab = '/etc/security/keytabs/hdfs.headless.keytab',
         kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
         dfs_type = '',
         owner = 'hcat',
-        hadoop_conf_dir = '/etc/hadoop/conf',
+        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
         type = 'directory',
         action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
         mode = 0755,
@@ -551,13 +567,13 @@ class TestHiveServer(RMFTestCase):
     self.assertResourceCalled('HdfsResource', '/user/hcat',
         immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
         security_enabled = True,
-        hadoop_bin_dir = '/usr/bin',
+        hadoop_bin_dir = 'mock_hadoop_dir',
         keytab = '/etc/security/keytabs/hdfs.headless.keytab',
         kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
         dfs_type = '',
         owner = 'hcat',
-        hadoop_conf_dir = '/etc/hadoop/conf',
+        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
         type = 'directory',
         action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
         mode = 0755,
@@ -566,13 +582,13 @@ class TestHiveServer(RMFTestCase):
     self.assertResourceCalled('HdfsResource', '/apps/hive/warehouse',
         immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
         security_enabled = True,
-        hadoop_bin_dir = '/usr/bin',
+        hadoop_bin_dir = 'mock_hadoop_dir',
         keytab = '/etc/security/keytabs/hdfs.headless.keytab',
         kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
         dfs_type = '',
         owner = 'hive',
-        hadoop_conf_dir = '/etc/hadoop/conf',
+        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
         type = 'directory',
         action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
         group = 'hadoop',
@@ -581,13 +597,13 @@ class TestHiveServer(RMFTestCase):
     self.assertResourceCalled('HdfsResource', '/user/hive',
         immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
         security_enabled = True,
-        hadoop_bin_dir = '/usr/bin',
+        hadoop_bin_dir = 'mock_hadoop_dir',
         keytab = '/etc/security/keytabs/hdfs.headless.keytab',
         kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
         dfs_type = '',
         owner = 'hive',
-        hadoop_conf_dir = '/etc/hadoop/conf',
+        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
         type = 'directory',
         action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
         mode = 0755,
@@ -595,14 +611,14 @@ class TestHiveServer(RMFTestCase):
     self.assertResourceCalled('HdfsResource', '/custompath/tmp/hive',
         immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
         security_enabled = True,
-        hadoop_conf_dir = '/etc/hadoop/conf',
+        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
         keytab = '/etc/security/keytabs/hdfs.headless.keytab',
         kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
         dfs_type = '',
         owner = 'hive',
         group = 'hdfs',
-        hadoop_bin_dir = '/usr/bin',
+        hadoop_bin_dir = 'mock_hadoop_dir',
         type = 'directory',
         action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
         mode = 0777,
@@ -610,13 +626,13 @@ class TestHiveServer(RMFTestCase):
     self.assertResourceCalled('HdfsResource', None,
         immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
         security_enabled = True,
-        hadoop_bin_dir = '/usr/bin',
+        hadoop_bin_dir = 'mock_hadoop_dir',
         keytab = '/etc/security/keytabs/hdfs.headless.keytab',
         kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
         dfs_type = '',
         action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
-        hadoop_conf_dir = '/etc/hadoop/conf',
+        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
     )
     self.assertResourceCalled('Directory', '/etc/hive',
                               mode=0755,
@@ -660,7 +676,7 @@ class TestHiveServer(RMFTestCase):
     )
     self.assertResourceCalled('XmlConfig', 'hive-site.xml',
                               group='hadoop',
-                              conf_dir='/etc/hive/conf.server',
+                              conf_dir='/usr/hdp/current/hive-server2/conf/conf.server',
                               mode=0600,
                               configuration_attributes={u'final': {u'hive.optimize.bucketmapjoin.sortedmerge': u'true',
                                                                    u'javax.jdo.option.ConnectionDriverName': u'true',
@@ -670,13 +686,13 @@ class TestHiveServer(RMFTestCase):
     )
     self.assertResourceCalled('XmlConfig', 'hiveserver2-site.xml',
       group = 'hadoop',
-      conf_dir = '/etc/hive/conf.server',
+      conf_dir = '/usr/hdp/current/hive-server2/conf/conf.server',
       mode = 0600,
       owner = 'hive',
       configuration_attributes = self.getConfig()['configuration_attributes']['hiveserver2-site'],
       configurations = self.getConfig()['configurations']['hiveserver2-site'],
     )
-    self.assertResourceCalled('File', '/etc/hive/conf.server/hive-env.sh',
+    self.assertResourceCalled('File', '/usr/hdp/current/hive-server2/conf/conf.server/hive-env.sh',
                               content=InlineTemplate(self.getConfig()['configurations']['hive-env']['content']),
                               owner='hive',
                               group='hadoop',
@@ -693,7 +709,7 @@ class TestHiveServer(RMFTestCase):
                               group='root',
                               mode=0644,
     )
-    self.assertResourceCalled('File', '/etc/hive/conf.server/zkmigrator_jaas.conf',
+    self.assertResourceCalled('File', '/usr/hdp/current/hive-server2/conf/conf.server/zkmigrator_jaas.conf',
                               content = Template('zkmigrator_jaas.conf.j2'),
                               owner = 'hive',
                               group = 'hadoop',
@@ -717,7 +733,7 @@ class TestHiveServer(RMFTestCase):
                               content=Template('startHiveserver2.sh.j2'),
                               mode=0755,
     )
-    self.assertResourceCalled('File', '/etc/hive/conf.server/hadoop-metrics2-hiveserver2.properties',
+    self.assertResourceCalled('File', '/usr/hdp/current/hive-server2/conf/conf.server/hadoop-metrics2-hiveserver2.properties',
                               owner = 'hive',
                               group = 'hadoop',
                               content = Template('hadoop-metrics2-hiveserver2.properties.j2'),
@@ -749,11 +765,11 @@ class TestHiveServer(RMFTestCase):
   @patch("socket.socket")
   def test_socket_timeout(self, socket_mock, time_mock):
     s = socket_mock.return_value
-    s.connect = MagicMock()    
+    s.connect = MagicMock()
     s.connect.side_effect = socket.error("")
-    
+
     time_mock.return_value = 1000
-    
+
     try:
       self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_server.py",
                            classname = "HiveServer",
@@ -762,7 +778,7 @@ class TestHiveServer(RMFTestCase):
                            stack_version = self.STACK_VERSION,
                            target = RMFTestCase.TARGET_COMMON_SERVICES
       )
-      
+
       self.fail("Script failure due to socket error was expected")
     except:
       self.assert_configure_default()
@@ -788,8 +804,8 @@ From source with checksum 150f554beae04f76f814f59549dead8b"""
     )
 
     # ensure deregister is called
-    self.assertResourceCalledIgnoreEarlier('Execute', 'hive --config /etc/hive/conf.server --service hiveserver2 --deregister 1.2.1.2.3.0.0-2434',
-      path=['/bin:/usr/hdp/current/hive-server2/bin:/usr/hdp/current/hadoop-client/bin'],
+    self.assertResourceCalledIgnoreEarlier('Execute', 'hive --config /usr/hdp/current/hive-server2/conf/conf.server --service hiveserver2 --deregister 1.2.1.2.3.0.0-2434',
+      path=['/bin:/usr/hdp/current/hive-server2/bin:mock_hadoop_dir'],
       tries=1, user='hive')
 
     # ensure stop is called
@@ -820,8 +836,8 @@ From source with checksum 150f554beae04f76f814f59549dead8b"""
     )
 
     # ensure that deregister is called
-    self.assertResourceCalledIgnoreEarlier( 'Execute', 'hive --config /etc/hive/conf.server --service hiveserver2 --deregister 1.2.1.2.3.0.0-2434',
-      path=['/bin:/usr/hdp/current/hive-server2/bin:/usr/hdp/current/hadoop-client/bin'],
+    self.assertResourceCalledIgnoreEarlier( 'Execute', 'hive --config /usr/hdp/current/hive-server2/conf/conf.server --service hiveserver2 --deregister 1.2.1.2.3.0.0-2434',
+      path=['/bin:/usr/hdp/current/hive-server2/bin:mock_hadoop_dir'],
       tries=1, user='hive')
 
     # ensure hdp-select is called
@@ -874,7 +890,7 @@ From source with checksum 150f554beae04f76f814f59549dead8b"""
     self.assertResourceCalled('HdfsResource', None,
         immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
         security_enabled = False,
-        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+        hadoop_bin_dir = 'mock_hadoop_dir',
         keytab = UnknownConfigurationMock(),
         kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
@@ -915,7 +931,7 @@ From source with checksum 150f554beae04f76f814f59549dead8b"""
     self.assertResourceCalled('HdfsResource', None,
         immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
         security_enabled = False,
-        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+        hadoop_bin_dir = 'mock_hadoop_dir',
         keytab = UnknownConfigurationMock(),
         kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
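
The mock_stack_feature helper above, installed class-wide via @patch.object(stack_features, "check_stack_feature", new=MagicMock(side_effect=mock_stack_feature)), forces ROLLING_UPGRADE and CONFIG_VERSIONING on even though the config files used by these tests describe an older stack. A self-contained sketch of the same side_effect pattern, with a toy stand-in for the stack_features module since the real import needs an Ambari checkout:

    from mock import MagicMock, patch  # unittest.mock works the same on Python 3

    class stack_features_stub(object):  # stand-in for ...functions.stack_features
        @staticmethod
        def check_stack_feature(stack_feature, stack_version):
            return False  # the real check would consult stack metadata

    def fake_check(stack_feature, stack_version):
        # whitelist the features the tests need, ignoring the version
        return stack_feature in ("rolling_upgrade", "config_versioning")

    @patch.object(stack_features_stub, "check_stack_feature",
                  new=MagicMock(side_effect=fake_check))
    def run():
        # side_effect routes each call through fake_check, so whitelisted
        # features report True regardless of the stack version passed in
        assert stack_features_stub.check_stack_feature("rolling_upgrade", "2.0")
        assert not stack_features_stub.check_stack_feature("something_else", "2.0")

    run()

Because new= supplies the replacement object directly, patch does not inject an extra mock argument, which is why the decorated test methods keep their original signatures.
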

http://git-wip-us.apache.org/repos/asf/ambari/blob/5433e479/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_service_check.py b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_service_check.py
index ded4d45..3421189 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_service_check.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_service_check.py
@@ -296,7 +296,7 @@ class TestServiceCheck(RMFTestCase):
 
     self.assertResourceCalled('Execute', "env JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /tmp/hcatSmoke.sh hcatsmoke prepare true",
         logoutput = True,
-        path = ['/usr/sbin','/usr/local/bin','/bin','/usr/bin', '/bin:/usr/hdp/current/hadoop-client/bin:/usr/hdp/2.3.0.0-1234/hive/bin'],
+        path = ['/usr/sbin','/usr/local/bin','/bin','/usr/bin', '/bin:/usr/hdp/2.3.0.0-1234/hadoop/bin:/usr/hdp/2.3.0.0-1234/hive/bin'],
         tries = 3,
         user = 'ambari-qa',
         try_sleep = 5)
@@ -340,7 +340,7 @@ class TestServiceCheck(RMFTestCase):
     # LLAP call
     self.assertResourceCalled('Execute',
       "! beeline -u 'jdbc:hive2://c6402.ambari.apache.org:10500/;transportMode=binary' --hiveconf \"hiveLlapServiceCheck=\" -f /usr/hdp/current/hive-server2-hive2/scripts/llap/sql/serviceCheckScript.sql -e '' 2>&1| awk '{print}'|grep -i -e 'Invalid status\|Invalid URL\|command not found\|Connection refused'",
-      path = ['/usr/sbin', '/usr/local/bin', '/bin', '/usr/bin', '/bin:/usr/hdp/current/hadoop-client/bin:/usr/hdp/2.3.0.0-1234/hive2/bin'],
+      path = ['/usr/sbin', '/usr/local/bin', '/bin', '/usr/bin', '/bin:/usr/hdp/2.3.0.0-1234/hadoop/bin:/usr/hdp/2.3.0.0-1234/hive2/bin'],
       tries = 1,
       stderr = -1,
       wait_for_finish = True,

http://git-wip-us.apache.org/repos/asf/ambari/blob/5433e479/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_webhcat_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_webhcat_server.py b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_webhcat_server.py
index 943c201..f9480ee 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_webhcat_server.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_webhcat_server.py
@@ -21,10 +21,22 @@ import json
 from mock.mock import MagicMock, patch
 from stacks.utils.RMFTestCase import *
 from resource_management.core.exceptions import Fail
+from resource_management.libraries.functions import StackFeature
 
+# used for faking out stack features when the config files used by unit tests use older stacks
+def mock_stack_feature(stack_feature, stack_version):
+  if stack_feature == StackFeature.ROLLING_UPGRADE:
+    return True
+  if stack_feature == StackFeature.CONFIG_VERSIONING:
+    return True
+  if stack_feature == StackFeature.HIVE_WEBHCAT_SPECIFIC_CONFIGS:
+    return True
+
+  return False
 
 @patch("os.path.isfile", new = MagicMock(return_value=True))
 @patch("glob.glob", new = MagicMock(return_value=["one", "two"]))
+@patch("resource_management.libraries.functions.stack_features.check_stack_feature", new=MagicMock(side_effect=mock_stack_feature))
 class TestWebHCatServer(RMFTestCase):
   COMMON_SERVICES_PACKAGE_DIR = "HIVE/0.12.0.2.0/package"
   STACK_VERSION = "2.0.6"
@@ -136,7 +148,7 @@ class TestWebHCatServer(RMFTestCase):
 
     self.assert_configure_secured()
     self.assertResourceCalled('Execute', 'cd /var/run/webhcat ; /usr/hdp/current/hive-webhcat/sbin/webhcat_server.sh start',
-        environment = {'HADOOP_HOME': '/usr/hdp/current/hadoop-client'},
+        environment = {'HADOOP_HOME': '/usr/hdp/2.1.0.0-1234/hadoop'},
         not_if = "ls /var/run/webhcat/webhcat.pid >/dev/null 2>&1 && ps -p `cat /var/run/webhcat/webhcat.pid` >/dev/null 2>&1",
         user = 'hcat',
     )
@@ -153,7 +165,7 @@ class TestWebHCatServer(RMFTestCase):
 
     self.assertResourceCalled('Execute', '/usr/hdp/current/hive-webhcat/sbin/webhcat_server.sh stop',
                               user = 'hcat',
-                              environment = {'HADOOP_HOME': '/usr/hdp/current/hadoop-client' }
+                              environment = {'HADOOP_HOME': '/usr/hdp/2.1.0.0-1234/hadoop' }
                               )
 
     self.assertResourceCalled('Execute', 'ambari-sudo.sh kill -9 `cat /var/run/webhcat/webhcat.pid`',
@@ -207,7 +219,7 @@ class TestWebHCatServer(RMFTestCase):
                               create_parents = True,
                               mode = 0755,
                               )
-    self.assertResourceCalled('Directory', '/etc/hive-webhcat/conf',
+    self.assertResourceCalled('Directory', '/usr/hdp/current/hive-webhcat/etc/webhcat',
                               owner = 'hcat',
                               group = 'hadoop',
                               create_parents = True,
@@ -216,20 +228,20 @@ class TestWebHCatServer(RMFTestCase):
     self.assertResourceCalled('XmlConfig', 'webhcat-site.xml',
                               owner = 'hcat',
                               group = 'hadoop',
-                              conf_dir = '/etc/hive-webhcat/conf',
+                              conf_dir = '/usr/hdp/current/hive-webhcat/etc/webhcat',
                               configurations = self.getConfig()['configurations']['webhcat-site'],
                               configuration_attributes = self.getConfig()['configuration_attributes']['webhcat-site']
     )
-    self.assertResourceCalled('File', '/etc/hive-webhcat/conf/webhcat-env.sh',
+    self.assertResourceCalled('File', '/usr/hdp/current/hive-webhcat/etc/webhcat/webhcat-env.sh',
                               content = InlineTemplate(self.getConfig()['configurations']['webhcat-env']['content']),
                               owner = 'hcat',
                               group = 'hadoop',
                               )
-    self.assertResourceCalled('Directory', '/usr/hdp/current/hive-webhcat/conf',
+    self.assertResourceCalled('Directory', '/usr/hdp/current/hive-webhcat/etc/webhcat',
         cd_access = 'a',
         create_parents = True
     )
-    self.assertResourceCalled('File', '/etc/hive-webhcat/conf/webhcat-log4j.properties',
+    self.assertResourceCalled('File', '/usr/hdp/current/hive-webhcat/etc/webhcat/webhcat-log4j.properties',
                               content = InlineTemplate('log4jproperties\nline2'),
                               owner = 'hcat',
                               group = 'hadoop',
@@ -249,7 +261,7 @@ class TestWebHCatServer(RMFTestCase):
                               create_parents = True,
                               mode = 0755,
                               )
-    self.assertResourceCalled('Directory', '/etc/hive-webhcat/conf',
+    self.assertResourceCalled('Directory', '/usr/hdp/current/hive-webhcat/etc/webhcat',
                               owner = 'hcat',
                               group = 'hadoop',
                               create_parents = True,
@@ -258,32 +270,35 @@ class TestWebHCatServer(RMFTestCase):
     self.assertResourceCalled('XmlConfig', 'webhcat-site.xml',
                               owner = 'hcat',
                               group = 'hadoop',
-                              conf_dir = '/etc/hive-webhcat/conf',
+                              conf_dir = '/usr/hdp/current/hive-webhcat/etc/webhcat',
                               configurations = self.getConfig()['configurations']['webhcat-site'],
                               configuration_attributes = self.getConfig()['configuration_attributes']['webhcat-site']
     )
-    self.assertResourceCalled('File', '/etc/hive-webhcat/conf/webhcat-env.sh',
+
+    self.assertResourceCalledIgnoreEarlier('File', '/usr/hdp/current/hive-webhcat/etc/webhcat/webhcat-env.sh',
                               content = InlineTemplate(self.getConfig()['configurations']['webhcat-env']['content']),
                               owner = 'hcat',
                               group = 'hadoop',
                               )
-    self.assertResourceCalled('Directory', '/usr/hdp/current/hive-webhcat/conf',
+    self.assertResourceCalled('Directory', '/usr/hdp/current/hive-webhcat/etc/webhcat',
         cd_access = 'a',
         create_parents = True
     )
-    self.assertResourceCalled('File', '/etc/hive-webhcat/conf/webhcat-log4j.properties',
+    self.assertResourceCalled('File', '/usr/hdp/current/hive-webhcat/etc/webhcat/webhcat-log4j.properties',
                               content = InlineTemplate('log4jproperties\nline2'),
                               owner = 'hcat',
                               group = 'hadoop',
                               mode = 0644,
                               )
 
+  @patch("resource_management.core.sudo.path_isdir", new = MagicMock(return_value = True))
   def test_pre_upgrade_restart(self):
     config_file = self.get_src_folder()+"/test/python/stacks/2.0.6/configs/default.json"
     with open(config_file, "r") as f:
       json_content = json.load(f)
     version = '2.2.1.0-3242'
     json_content['commandParams']['version'] = version
+    json_content['hostLevelParams']['stack_version'] = "2.3"
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/webhcat_server.py",
                        classname = "WebHCatServer",
                        command = "pre_upgrade_restart",
@@ -296,6 +311,7 @@ class TestWebHCatServer(RMFTestCase):
     self.assertNoMoreResources()
 
   @patch("resource_management.core.shell.call")
+  @patch("resource_management.core.sudo.path_isdir", new = MagicMock(return_value = True))
   def test_pre_upgrade_restart_23(self, call_mock):
     import sys
 
@@ -326,6 +342,7 @@ class TestWebHCatServer(RMFTestCase):
 
 
   @patch("resource_management.core.shell.call")
+  @patch("resource_management.core.sudo.path_isdir", new = MagicMock(return_value = True))
   def test_rolling_restart_configure(self, call_mock):
     import sys
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/5433e479/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_server.py b/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_server.py
index a5dac6c..4d94723 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_server.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_server.py
@@ -1177,6 +1177,7 @@ class TestOozieServer(RMFTestCase):
   @patch("shutil.rmtree", new = MagicMock())
   @patch("glob.iglob")
   @patch("shutil.copy2", new = MagicMock())
+  @patch("resource_management.core.sudo.path_isdir", new = MagicMock(return_value = True))
   def test_upgrade(self, glob_mock, remove_mock,
       isfile_mock, exists_mock, isdir_mock):
 
@@ -1228,6 +1229,7 @@ class TestOozieServer(RMFTestCase):
   @patch("shutil.rmtree", new = MagicMock())
   @patch("glob.iglob")
   @patch("shutil.copy2", new = MagicMock())
+  @patch("resource_management.core.sudo.path_isdir", new = MagicMock(return_value = True))
   def test_upgrade_23(self, glob_mock, remove_mock,
       isfile_mock, exists_mock, isdir_mock):
 
@@ -1252,6 +1254,7 @@ class TestOozieServer(RMFTestCase):
 
     version = '2.3.0.0-1234'
     json_content['commandParams']['version'] = version
+    json_content['hostLevelParams']['stack_version'] = "2.3"
 
     mocks_dict = {}
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/oozie_server.py",
@@ -1287,6 +1290,7 @@ class TestOozieServer(RMFTestCase):
   @patch("shutil.rmtree", new = MagicMock())
   @patch("glob.iglob")
   @patch("shutil.copy2", new = MagicMock())
+  @patch("resource_management.core.sudo.path_isdir", new = MagicMock(return_value=True))
   def test_upgrade_23_with_type(self, glob_mock, remove_mock,
       isfile_mock, exists_mock, isdir_mock):
 
@@ -1311,6 +1315,7 @@ class TestOozieServer(RMFTestCase):
 
     version = '2.3.0.0-1234'
     json_content['commandParams']['version'] = version
+    json_content['hostLevelParams']['stack_version'] = "2.3"
     json_content['upgradeSummary'] = {
       'services': { 'OOZIE': { 'sourceStackId': 'HDP-2.3' }},
       'direction': 'UPGRADE',
@@ -1353,6 +1358,7 @@ class TestOozieServer(RMFTestCase):
   @patch("os.remove")
   @patch("shutil.rmtree", new = MagicMock())
   @patch("shutil.copy2", new = MagicMock())
+  @patch("resource_management.core.sudo.path_isdir", new = MagicMock(return_value = True))
   def test_downgrade_no_compression_library_copy(self, remove_mock,
       isfile_mock, exists_mock, isdir_mock):
 
@@ -1397,6 +1403,7 @@ class TestOozieServer(RMFTestCase):
     version = '2.3.0.0-1234'
     json_content['commandParams']['version'] = version
     json_content['hostLevelParams']['stack_name'] = "HDP"
+    json_content['hostLevelParams']['stack_version'] = "2.3"
 
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/oozie_server_upgrade.py",
       classname = "OozieUpgrade", command = "upgrade_oozie_database_and_sharelib",
@@ -1459,6 +1466,7 @@ class TestOozieServer(RMFTestCase):
     version = '2.3.0.0-1234'
     json_content['commandParams']['version'] = version
     json_content['hostLevelParams']['stack_name'] = "HDP"
+    json_content['hostLevelParams']['stack_version'] = "2.3"
 
     # use mysql external database
     json_content['configurations']['oozie-site']['oozie.service.JPAService.jdbc.driver'] = "com.mysql.jdbc.Driver"
@@ -1525,6 +1533,7 @@ class TestOozieServer(RMFTestCase):
   @patch("shutil.rmtree", new = MagicMock())
   @patch("glob.iglob")
   @patch("shutil.copy2", new = MagicMock())
+  @patch("resource_management.core.sudo.path_isdir", new = MagicMock(return_value = True))
   def test_upgrade_23_ensure_falcon_copied(self, glob_mock, remove_mock,
       isfile_mock, exists_mock, isdir_mock):
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/5433e479/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_service_check.py b/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_service_check.py
index 468f602..4edaacc 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_service_check.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_service_check.py
@@ -30,6 +30,7 @@ class TestOozieServiceCheck(RMFTestCase):
 
   @patch("resource_management.core.shell.call")
   @patch("glob.glob")
+  @patch("resource_management.libraries.functions.stack_select.get_hadoop_dir", new = MagicMock(return_value = "/usr/hdp/current/hadoop-client"))
   def test_service_check(self, glob_mock, call_mock):
     glob_mock.return_value = ["examples-dir", "b"]
 
@@ -39,7 +40,7 @@ class TestOozieServiceCheck(RMFTestCase):
     version = '2.3.0.0-1234'
     json_content['commandParams']['version'] = version
     json_content['hostLevelParams']['stack_name'] = 'HDP'
-    json_content['hostLevelParams']['stack_version'] = '2.2'
+    json_content['hostLevelParams']['stack_version'] = '2.3'
     json_content['configurations']['oozie-env']['service_check_job_name'] = 'map-reduce'
 
     mocks_dict = {}
@@ -65,7 +66,7 @@ class TestOozieServiceCheck(RMFTestCase):
       mode = 0755)
 
     self.assertResourceCalled('Execute',
-      ('/tmp/prepareOozieHdfsDirectories.sh', '/usr/hdp/current/oozie-client/conf', 'examples-dir', '/usr/hdp/current/hadoop-client/conf', 'c6402.ambari.apache.org:8050', 'hdfs://c6401.ambari.apache.org:8020', 'default', 'map-reduce'),
+      ('/tmp/prepareOozieHdfsDirectories.sh', '/usr/hdp/current/oozie-client/conf', 'examples-dir', '/usr/hdp/2.3.0.0-1234/hadoop/conf', 'c6402.ambari.apache.org:8050', 'hdfs://c6401.ambari.apache.org:8020', 'default', 'map-reduce'),
       tries = 3,
       try_sleep = 5,
       logoutput = True)

http://git-wip-us.apache.org/repos/asf/ambari/blob/5433e479/ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py
index 50b6583..1ebf530 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py
@@ -756,6 +756,7 @@ class TestHistoryServer(RMFTestCase):
 
   @patch.object(functions, "get_stack_version", new = MagicMock(return_value="2.3.0.0-1234"))
   @patch("resource_management.libraries.functions.copy_tarball.copy_to_hdfs")
+  @patch("resource_management.core.sudo.path_isdir", new = MagicMock(return_value = True))
   def test_pre_upgrade_restart_23(self, copy_to_hdfs_mock):
     config_file = self.get_src_folder()+"/test/python/stacks/2.0.6/configs/default.json"
     with open(config_file, "r") as f:
@@ -784,13 +785,13 @@ class TestHistoryServer(RMFTestCase):
     self.assertResourceCalled('HdfsResource', None,
         immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
         security_enabled = False,
-        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+        hadoop_bin_dir = '/usr/hdp/2.3.0.0-1234/hadoop/bin',
         keytab = UnknownConfigurationMock(),
         kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
         dfs_type = '',
         action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
-        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+        hadoop_conf_dir = '/usr/hdp/2.3.0.0-1234/hadoop/conf',
     )
 
     self.assertNoMoreResources()

http://git-wip-us.apache.org/repos/asf/ambari/blob/5433e479/ambari-server/src/test/python/stacks/2.0.6/YARN/test_mapreduce2_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_mapreduce2_client.py b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_mapreduce2_client.py
index 75eff39..43241a4 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_mapreduce2_client.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_mapreduce2_client.py
@@ -421,6 +421,7 @@ class TestMapReduce2Client(RMFTestCase):
     self.assertResourceCalledIgnoreEarlier('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'hadoop-client', version), sudo=True)
     self.assertNoMoreResources()
 
+  @patch("resource_management.core.sudo.path_isdir", new = MagicMock(return_value = True))
   def test_stack_upgrade_save_new_config(self):
     config_file = self.get_src_folder()+"/test/python/stacks/2.0.6/configs/client-upgrade.json"
     with open(config_file, "r") as f:

http://git-wip-us.apache.org/repos/asf/ambari/blob/5433e479/ambari-server/src/test/python/stacks/2.0.6/hooks/after-INSTALL/test_after_install.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/hooks/after-INSTALL/test_after_install.py b/ambari-server/src/test/python/stacks/2.0.6/hooks/after-INSTALL/test_after_install.py
index 48498be..92f5011 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/hooks/after-INSTALL/test_after_install.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/hooks/after-INSTALL/test_after_install.py
@@ -90,10 +90,10 @@ class TestHookAfterInstall(RMFTestCase):
     self.assertResourceCalled('XmlConfig', 'core-site.xml',
       owner = 'hdfs',
       group = 'hadoop',
-      conf_dir = "/usr/hdp/current/hadoop-client/conf",
+      conf_dir = "/usr/hdp/2.3.0.0-1234/hadoop/conf",
       configurations = self.getConfig()['configurations']['core-site'],
       configuration_attributes = self.getConfig()['configuration_attributes']['core-site'],
-      only_if="ls /usr/hdp/current/hadoop-client/conf")
+      only_if="ls /usr/hdp/2.3.0.0-1234/hadoop/conf")
 
     package_dirs = conf_select.get_package_dirs();
     for package, dir_defs in package_dirs.iteritems():
@@ -158,10 +158,10 @@ class TestHookAfterInstall(RMFTestCase):
     self.assertResourceCalled('XmlConfig', 'core-site.xml',
       owner = 'hdfs',
       group = 'hadoop',
-      conf_dir = "/usr/hdp/current/hadoop-client/conf",
+      conf_dir = "/usr/hdp/2.3.0.0-1234/hadoop/conf",
       configurations = self.getConfig()['configurations']['core-site'],
       configuration_attributes = self.getConfig()['configuration_attributes']['core-site'],
-      only_if="ls /usr/hdp/current/hadoop-client/conf")
+      only_if="ls /usr/hdp/2.3.0.0-1234/hadoop/conf")
 
     package_dirs = conf_select.get_package_dirs();
     for package, dir_defs in package_dirs.iteritems():
@@ -259,10 +259,10 @@ class TestHookAfterInstall(RMFTestCase):
     self.assertResourceCalled('XmlConfig', 'core-site.xml',
       owner = 'hdfs',
       group = 'hadoop',
-      conf_dir = "/usr/hdp/current/hadoop-client/conf",
+      conf_dir = "/usr/hdp/2.3.0.0-1234/hadoop/conf",
       configurations = self.getConfig()['configurations']['core-site'],
       configuration_attributes = self.getConfig()['configuration_attributes']['core-site'],
-      only_if="ls /usr/hdp/current/hadoop-client/conf")
+      only_if="ls /usr/hdp/2.3.0.0-1234/hadoop/conf")
 
     package_dirs = conf_select.get_package_dirs();
     for package, dir_defs in package_dirs.iteritems():

http://git-wip-us.apache.org/repos/asf/ambari/blob/5433e479/ambari-server/src/test/python/stacks/2.0.6/hooks/before-ANY/test_before_any.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/hooks/before-ANY/test_before_any.py b/ambari-server/src/test/python/stacks/2.0.6/hooks/before-ANY/test_before_any.py
index 9dceb69..72f838c 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/hooks/before-ANY/test_before_any.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/hooks/before-ANY/test_before_any.py
@@ -182,15 +182,6 @@ class TestHookBeforeInstall(RMFTestCase):
     self.assertResourceCalled('Directory', '/etc/hadoop',
                               mode = 0755,
                               )
-    self.assertResourceCalled('Directory', '/etc/hadoop/conf.empty',
-                              owner = 'root',
-                              create_parents = True,
-                              group = 'hadoop',
-                              )
-    self.assertResourceCalled('Link', '/etc/hadoop/conf',
-                              not_if = 'ls /etc/hadoop/conf',
-                              to = '/etc/hadoop/conf.empty',
-                              )
     self.assertResourceCalled('File', '/etc/hadoop/conf/hadoop-env.sh',
                               content = InlineTemplate(self.getConfig()['configurations']['hadoop-env']['content']),
                               owner = 'hdfs',

http://git-wip-us.apache.org/repos/asf/ambari/blob/5433e479/ambari-server/src/test/python/stacks/2.1/FALCON/test_falcon_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.1/FALCON/test_falcon_server.py b/ambari-server/src/test/python/stacks/2.1/FALCON/test_falcon_server.py
index 239e2b5..7f2ed46 100644
--- a/ambari-server/src/test/python/stacks/2.1/FALCON/test_falcon_server.py
+++ b/ambari-server/src/test/python/stacks/2.1/FALCON/test_falcon_server.py
@@ -236,8 +236,8 @@ class TestFalconServer(RMFTestCase):
 
     self.assertResourceCalled('Execute',
       '/usr/hdp/current/falcon-server/bin/falcon-stop',
-      path = ['/usr/hdp/current/hadoop-client/bin'], user='falcon',
-      environment = {'HADOOP_HOME': '/usr/hdp/current/hadoop-client'})
+      path = ['/usr/hdp/2.2.1.0-2135/hadoop/bin'], user='falcon',
+      environment = {'HADOOP_HOME': '/usr/hdp/2.2.1.0-2135/hadoop'})
 
     self.assertResourceCalled('File', '/var/run/falcon/falcon.pid',
       action = ['delete'])
@@ -342,7 +342,7 @@ class TestFalconServer(RMFTestCase):
     self.assertResourceCalled('HdfsResource', '/apps/falcon',
         immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
         security_enabled = False,
-        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+        hadoop_bin_dir = '/usr/hdp/2.2.1.0-2135/hadoop/bin',
         keytab = UnknownConfigurationMock(),
         default_fs = 'hdfs://c6401.ambari.apache.org:8020',
         hdfs_site = self.getConfig()['configurations']['hdfs-site'],
@@ -351,7 +351,7 @@ class TestFalconServer(RMFTestCase):
         user = 'hdfs',
         dfs_type = '',
         owner = 'falcon',
-        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+        hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'directory',
         action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
         mode = 0777,
@@ -359,7 +359,7 @@ class TestFalconServer(RMFTestCase):
     self.assertResourceCalled('HdfsResource', '/apps/data-mirroring',
         immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
         security_enabled = False,
-        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+        hadoop_bin_dir = '/usr/hdp/2.2.1.0-2135/hadoop/bin',
         keytab = UnknownConfigurationMock(),
         source = '/usr/hdp/current/falcon-server/data-mirroring',
         default_fs = 'hdfs://c6401.ambari.apache.org:8020',
@@ -372,7 +372,7 @@ class TestFalconServer(RMFTestCase):
         recursive_chown = True,
         owner = 'falcon',
         group = 'users',
-        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+        hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'directory',
         action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
         mode = 0770,
@@ -380,7 +380,7 @@ class TestFalconServer(RMFTestCase):
     self.assertResourceCalled('HdfsResource', None,
         immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
         security_enabled = False,
-        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+        hadoop_bin_dir = '/usr/hdp/2.2.1.0-2135/hadoop/bin',
         keytab = UnknownConfigurationMock(),
         default_fs = 'hdfs://c6401.ambari.apache.org:8020',
         hdfs_site = self.getConfig()['configurations']['hdfs-site'],
@@ -389,7 +389,7 @@ class TestFalconServer(RMFTestCase):
         user = 'hdfs',
         dfs_type = '',
         action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
-        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+        hadoop_conf_dir = '/etc/hadoop/conf',
     )
     self.assertResourceCalled('Directory', '/hadoop/falcon',
         owner = 'falcon',
@@ -404,17 +404,17 @@ class TestFalconServer(RMFTestCase):
         owner = 'falcon',
         create_parents = True,
     )
-   
+
     self.assertResourceCalled('Execute', '/usr/hdp/current/falcon-server/bin/falcon-config.sh server falcon',
-        environment = {'HADOOP_HOME': '/usr/hdp/current/hadoop-client'},
-        path = ['/usr/hdp/current/hadoop-client/bin'],
+        environment = {'HADOOP_HOME': '/usr/hdp/2.2.1.0-2135/hadoop'},
+        path = ['/usr/hdp/2.2.1.0-2135/hadoop/bin'],
         user = 'falcon',
         not_if = 'ls /var/run/falcon/falcon.pid && ps -p ',
     )
 
     self.assertResourceCalled('Execute', '/usr/hdp/current/falcon-server/bin/falcon-start -port 15000',
-        environment = {'HADOOP_HOME': '/usr/hdp/current/hadoop-client'},
-        path = ['/usr/hdp/current/hadoop-client/bin'],
+        environment = {'HADOOP_HOME': '/usr/hdp/2.2.1.0-2135/hadoop'},
+        path = ['/usr/hdp/2.2.1.0-2135/hadoop/bin'],
         user = 'falcon',
         not_if = 'ls /var/run/falcon/falcon.pid && ps -p ',
     )
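
The Falcon expectations above now pin HADOOP_HOME and the command path to the fully qualified stack directory (/usr/hdp/2.2.1.0-2135/hadoop) rather than the /usr/hdp/current/hadoop-client symlink. As a rough illustration of the distinction — a hypothetical helper, not the Ambari API — the resolution amounts to:

    import os

    # Hypothetical helper (illustrative only): return the fully qualified
    # /usr/hdp/<version>/hadoop directory once a stack version is resolved,
    # otherwise fall back to the /usr/hdp/current/hadoop-client symlink.
    def versioned_hadoop_home(stack_root="/usr/hdp", version=None):
        if version:
            return os.path.join(stack_root, version, "hadoop")
        return os.path.join(stack_root, "current", "hadoop-client")

    assert versioned_hadoop_home(version="2.2.1.0-2135") == "/usr/hdp/2.2.1.0-2135/hadoop"
    assert versioned_hadoop_home() == "/usr/hdp/current/hadoop-client"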

http://git-wip-us.apache.org/repos/asf/ambari/blob/5433e479/ambari-server/src/test/python/stacks/2.1/HIVE/test_hive_metastore.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.1/HIVE/test_hive_metastore.py b/ambari-server/src/test/python/stacks/2.1/HIVE/test_hive_metastore.py
index 82ade7c..e497f33 100644
--- a/ambari-server/src/test/python/stacks/2.1/HIVE/test_hive_metastore.py
+++ b/ambari-server/src/test/python/stacks/2.1/HIVE/test_hive_metastore.py
@@ -23,11 +23,22 @@ import os
 
 from mock.mock import MagicMock, call, patch
 from stacks.utils.RMFTestCase import *
-from resource_management.libraries.functions.constants import Direction
-from resource_management.libraries.script.script import Script
+from resource_management.libraries.functions.constants import Direction, StackFeature
+
+# used for faking out stack features when the config files used by unit tests use older stacks
+def mock_stack_feature(stack_feature, stack_version):
+  if stack_feature == StackFeature.ROLLING_UPGRADE:
+    return True
+  if stack_feature == StackFeature.CONFIG_VERSIONING:
+    return True
+  if stack_feature == StackFeature.HIVE_METASTORE_UPGRADE_SCHEMA:
+    return True
+
+  return False
 
 @patch("platform.linux_distribution", new = MagicMock(return_value="Linux"))
 @patch("resource_management.libraries.functions.get_user_call_output.get_user_call_output", new=MagicMock(return_value=(0,'123','')))
+@patch("resource_management.libraries.functions.stack_features.check_stack_feature", new=MagicMock(side_effect=mock_stack_feature))
 class TestHiveMetastore(RMFTestCase):
 
   COMMON_SERVICES_PACKAGE_DIR = "HIVE/0.12.0.2.0/package"
@@ -58,13 +69,13 @@ class TestHiveMetastore(RMFTestCase):
     self.assert_configure_default()
     self.assert_init_schema('aaa')
 
-    self.assertResourceCalled('Execute', '/tmp/start_metastore_script /var/log/hive/hive.out /var/log/hive/hive.err /var/run/hive/hive.pid /etc/hive/conf.server /var/log/hive',
-        environment = {'HADOOP_HOME': '/usr/hdp/current/hadoop-client',
+    self.assertResourceCalled('Execute', '/tmp/start_metastore_script /var/log/hive/hive.out /var/log/hive/hive.err /var/run/hive/hive.pid /usr/hdp/current/hive-server2/conf/conf.server /var/log/hive',
+        environment = {'HADOOP_HOME': '/usr/hdp/2.2.1.0-2067/hadoop',
            'HIVE_BIN': 'hive',
            'JAVA_HOME': u'/usr/jdk64/jdk1.7.0_45'},
         not_if = "ls /var/run/hive/hive.pid >/dev/null 2>&1 && ps -p 123 >/dev/null 2>&1",
         user = 'hive',
-        path = ['/bin:/usr/hdp/current/hive-server2/bin:/usr/bin'],
+        path = ['/bin:/usr/hdp/current/hive-server2/bin:/usr/hdp/2.2.1.0-2067/hadoop/bin'],
     )
 
     self.assertResourceCalled('Execute', '/usr/jdk64/jdk1.7.0_45/bin/java -cp /usr/lib/ambari-agent/DBConnectionVerification.jar:/usr/hdp/current/hive-server2/lib/mysql-connector-java.jar org.apache.ambari.server.DBConnectionVerification \'jdbc:mysql://c6402.ambari.apache.org/hive?createDatabaseIfNotExist=true\' hive aaa com.mysql.jdbc.Driver',
@@ -89,13 +100,13 @@ class TestHiveMetastore(RMFTestCase):
     self.assert_configure_default()
     self.assert_init_schema('aaa')
 
-    self.assertResourceCalled('Execute', '/tmp/start_metastore_script /var/log/hive/hive.out /var/log/hive/hive.err /var/run/hive/hive.pid /etc/hive/conf.server /var/log/hive',
-        environment = {'HADOOP_HOME': '/usr/hdp/current/hadoop-client',
+    self.assertResourceCalled('Execute', '/tmp/start_metastore_script /var/log/hive/hive.out /var/log/hive/hive.err /var/run/hive/hive.pid /usr/hdp/current/hive-server2/conf/conf.server /var/log/hive',
+        environment = {'HADOOP_HOME': '/usr/hdp/2.2.1.0-2067/hadoop',
            'HIVE_BIN': 'hive',
            'JAVA_HOME': u'/usr/jdk64/jdk1.7.0_45'},
         not_if = "ls /var/run/hive/hive.pid >/dev/null 2>&1 && ps -p 123 >/dev/null 2>&1",
         user = 'hive',
-        path = ['/bin:/usr/hdp/current/hive-server2/bin:/usr/bin'],
+        path = ['/bin:/usr/hdp/current/hive-server2/bin:/usr/hdp/2.2.1.0-2067/hadoop/bin'],
     )
     self.assertResourceCalled('Execute', '/usr/jdk64/jdk1.7.0_45/bin/java -cp /usr/lib/ambari-agent/DBConnectionVerification.jar:/usr/hdp/current/hive-server2/lib/mysql-connector-java.jar org.apache.ambari.server.DBConnectionVerification \'jdbc:mysql://c6402.ambari.apache.org/hive?createDatabaseIfNotExist=true\' hive aaa com.mysql.jdbc.Driver',
                               path = ['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'],
@@ -152,13 +163,13 @@ class TestHiveMetastore(RMFTestCase):
 
     self.assert_configure_secured()
     self.assert_init_schema('asd')
-    self.assertResourceCalled('Execute', '/tmp/start_metastore_script /var/log/hive/hive.out /var/log/hive/hive.err /var/run/hive/hive.pid /etc/hive/conf.server /var/log/hive',
+    self.assertResourceCalled('Execute', '/tmp/start_metastore_script /var/log/hive/hive.out /var/log/hive/hive.err /var/run/hive/hive.pid /usr/hdp/current/hive-server2/conf/conf.server /var/log/hive',
         environment = {'HADOOP_HOME': '/usr/hdp/current/hadoop-client',
            'HIVE_BIN': 'hive',
            'JAVA_HOME': u'/usr/jdk64/jdk1.7.0_45'},
         not_if = "ls /var/run/hive/hive.pid >/dev/null 2>&1 && ps -p 123 >/dev/null 2>&1",
         user = 'hive',
-        path = ['/bin:/usr/hdp/current/hive-server2/bin:/usr/bin'],
+        path = ['/bin:/usr/hdp/current/hive-server2/bin:/usr/hdp/current/hadoop-client/bin'],
     )
 
     self.assertResourceCalled('Execute', '/usr/jdk64/jdk1.7.0_45/bin/java -cp /usr/lib/ambari-agent/DBConnectionVerification.jar:/usr/hdp/current/hive-server2/lib/mysql-connector-java.jar org.apache.ambari.server.DBConnectionVerification \'jdbc:mysql://c6402.ambari.apache.org/hive?createDatabaseIfNotExist=true\' hive asd com.mysql.jdbc.Driver',
@@ -223,7 +234,7 @@ class TestHiveMetastore(RMFTestCase):
                               )
     self.assertResourceCalled('XmlConfig', 'hive-site.xml',
                               group = 'hadoop',
-                              conf_dir = '/etc/hive/conf.server',
+                              conf_dir = '/usr/hdp/current/hive-server2/conf/conf.server',
                               mode = 0600,
                               configuration_attributes = {u'final': {u'hive.optimize.bucketmapjoin.sortedmerge': u'true',
                                                                      u'javax.jdo.option.ConnectionDriverName': u'true',
@@ -231,7 +242,7 @@ class TestHiveMetastore(RMFTestCase):
                               owner = 'hive',
                               configurations = self.getConfig()['configurations']['hive-site'],
                               )
-    self.assertResourceCalled('File', '/etc/hive/conf.server/hive-env.sh',
+    self.assertResourceCalled('File', '/usr/hdp/current/hive-server2/conf/conf.server/hive-env.sh',
                               content = InlineTemplate(self.getConfig()['configurations']['hive-env']['content']),
                               owner = 'hive',
                               group = 'hadoop',
@@ -262,7 +273,7 @@ class TestHiveMetastore(RMFTestCase):
         content = DownloadSource('http://c6401.ambari.apache.org:8080/resources/DBConnectionVerification.jar'),
         mode = 0644,
     )
-    self.assertResourceCalled('File', '/etc/hive/conf.server/hadoop-metrics2-hivemetastore.properties',
+    self.assertResourceCalled('File', '/usr/hdp/current/hive-server2/conf/conf.server/hadoop-metrics2-hivemetastore.properties',
                               owner = 'hive',
                               group = 'hadoop',
                               content = Template('hadoop-metrics2-hivemetastore.properties.j2'),
@@ -325,7 +336,7 @@ class TestHiveMetastore(RMFTestCase):
                               )
     self.assertResourceCalled('XmlConfig', 'hive-site.xml',
                               group = 'hadoop',
-                              conf_dir = '/etc/hive/conf.server',
+                              conf_dir = '/usr/hdp/current/hive-server2/conf/conf.server',
                               mode = 0600,
                               configuration_attributes = {u'final': {u'hive.optimize.bucketmapjoin.sortedmerge': u'true',
                                                                      u'javax.jdo.option.ConnectionDriverName': u'true',
@@ -333,7 +344,7 @@ class TestHiveMetastore(RMFTestCase):
                               owner = 'hive',
                               configurations = self.getConfig()['configurations']['hive-site'],
                               )
-    self.assertResourceCalled('File', '/etc/hive/conf.server/hive-env.sh',
+    self.assertResourceCalled('File', '/usr/hdp/current/hive-server2/conf/conf.server/hive-env.sh',
                               content = InlineTemplate(self.getConfig()['configurations']['hive-env']['content']),
                               owner = 'hive',
                               group = 'hadoop',
@@ -350,7 +361,7 @@ class TestHiveMetastore(RMFTestCase):
                               group = 'root',
                               mode = 0644,
                               )
-    self.assertResourceCalled('File', '/etc/hive/conf.server/zkmigrator_jaas.conf',
+    self.assertResourceCalled('File', '/usr/hdp/current/hive-server2/conf/conf.server/zkmigrator_jaas.conf',
                               content = Template('zkmigrator_jaas.conf.j2'),
                               owner = 'hive',
                               group = 'hadoop',
@@ -370,7 +381,7 @@ class TestHiveMetastore(RMFTestCase):
         mode = 0644,
     )
 
-    self.assertResourceCalled('File', '/etc/hive/conf.server/hadoop-metrics2-hivemetastore.properties',
+    self.assertResourceCalled('File', '/usr/hdp/current/hive-server2/conf/conf.server/hadoop-metrics2-hivemetastore.properties',
                               owner = 'hive',
                               group = 'hadoop',
                               content = Template('hadoop-metrics2-hivemetastore.properties.j2'),
@@ -404,8 +415,8 @@ class TestHiveMetastore(RMFTestCase):
                               )
 
   def assert_init_schema(self, password):
-    self.assertResourceCalled('Execute', 'export HIVE_CONF_DIR=/etc/hive/conf.server ; /usr/hdp/current/hive-server2/bin/schematool -initSchema -dbType mysql -userName hive -passWord {password} -verbose'.format(password = password),
-        not_if = "ambari-sudo.sh su hive -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]export HIVE_CONF_DIR=/etc/hive/conf.server ; /usr/hdp/current/hive-server2/bin/schematool -info -dbType mysql -userName hive -passWord {password} -verbose'".format(password = password),
+    self.assertResourceCalled('Execute', 'export HIVE_CONF_DIR=/usr/hdp/current/hive-server2/conf/conf.server ; /usr/hdp/current/hive-server2/bin/schematool -initSchema -dbType mysql -userName hive -passWord {password} -verbose'.format(password = password),
+        not_if = "ambari-sudo.sh su hive -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]export HIVE_CONF_DIR=/usr/hdp/current/hive-server2/conf/conf.server ; /usr/hdp/current/hive-server2/bin/schematool -info -dbType mysql -userName hive -passWord {password} -verbose'".format(password = password),
         user = 'hive',
     )
 
@@ -450,7 +461,7 @@ class TestHiveMetastore(RMFTestCase):
                               ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'hive-metastore', version), sudo=True,)
     self.assertNoMoreResources()
 
-
+  @patch("resource_management.libraries.functions.stack_select.get_hadoop_dir", new = MagicMock(return_value="/usr/hdp/current/hadoop-client"))
   def test_pre_upgrade_restart_ims(self):
     """
     Tests the state of the init_metastore_schema property on update
@@ -577,7 +588,7 @@ class TestHiveMetastore(RMFTestCase):
     self.assertResourceCalledIgnoreEarlier('Directory', '/var/lib/hive', owner = 'hive', group = 'hadoop',
       mode = 0755, create_parents = True, cd_access = 'a')
 
-    self.assertResourceCalled('Execute', ('rm', '-f', '/usr/hdp/current/hive-metastore/lib/ojdbc6.jar'),
+    self.assertResourceCalledIgnoreEarlier('Execute', ('rm', '-f', '/usr/hdp/current/hive-metastore/lib/ojdbc6.jar'),
         path = ['/bin', '/usr/bin/'],
         sudo = True)
 
@@ -614,6 +625,7 @@ class TestHiveMetastore(RMFTestCase):
   @patch("os.path.exists")
   @patch("resource_management.core.shell.call")
   @patch("resource_management.libraries.functions.get_stack_version")
+  @patch("resource_management.libraries.functions.stack_select.get_hadoop_dir", new = MagicMock(return_value = "/usr/hdp/current/hadoop-client"))
   def test_upgrade_metastore_schema_using_new_db(self, get_stack_version_mock, call_mock, os_path_exists_mock):
     get_stack_version_mock.return_value = '2.3.0.0-1234'
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/5433e479/ambari-server/src/test/python/stacks/2.2/PIG/test_pig_service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.2/PIG/test_pig_service_check.py b/ambari-server/src/test/python/stacks/2.2/PIG/test_pig_service_check.py
index d3508e1..a337b0f 100644
--- a/ambari-server/src/test/python/stacks/2.2/PIG/test_pig_service_check.py
+++ b/ambari-server/src/test/python/stacks/2.2/PIG/test_pig_service_check.py
@@ -20,7 +20,20 @@ limitations under the License.
 from mock.mock import patch, MagicMock
 
 from stacks.utils.RMFTestCase import *
+from resource_management.libraries.functions.constants import StackFeature
 
+# used for faking out stack features when the config files used by unit tests use older stacks
+def mock_stack_feature(stack_feature, stack_version):
+  if stack_feature == StackFeature.ROLLING_UPGRADE:
+    return True
+  if stack_feature == StackFeature.CONFIG_VERSIONING:
+    return True
+  if stack_feature == StackFeature.PIG_ON_TEZ:
+    return True
+
+  return False
+
+@patch("resource_management.libraries.functions.stack_features.check_stack_feature", new=MagicMock(side_effect=mock_stack_feature))
 class TestPigServiceCheck(RMFTestCase):
   COMMON_SERVICES_PACKAGE_DIR = "PIG/0.12.0.2.0/package"
   STACK_VERSION = "2.2"
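
Because the @patch decorator above supplies new=, it is applied at class level to every test_* method without injecting an extra mock argument into their signatures. A self-contained sketch of that behavior, patching a standard-library function purely for illustration:

    import os
    import unittest
    from unittest.mock import MagicMock, patch

    # With new= supplied, the patch wraps each test_* method of the class
    # and no mock parameter is added to the method signatures.
    @patch("os.path.exists", new=MagicMock(return_value=True))
    class ExampleTest(unittest.TestCase):
        def test_patched(self):
            self.assertTrue(os.path.exists("/definitely/not/a/real/path"))

    if __name__ == "__main__":
        unittest.main()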

http://git-wip-us.apache.org/repos/asf/ambari/blob/5433e479/ambari-server/src/test/python/stacks/2.2/SPARK/test_job_history_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.2/SPARK/test_job_history_server.py b/ambari-server/src/test/python/stacks/2.2/SPARK/test_job_history_server.py
index 40c9d47..922ace2 100644
--- a/ambari-server/src/test/python/stacks/2.2/SPARK/test_job_history_server.py
+++ b/ambari-server/src/test/python/stacks/2.2/SPARK/test_job_history_server.py
@@ -59,7 +59,7 @@ class TestJobHistoryServer(RMFTestCase):
     self.assertResourceCalled('HdfsResource', None,
         immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
         security_enabled = False,
-        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+        hadoop_bin_dir = '/usr/hdp/2.2.1.0-2067/hadoop/bin',
         keytab = UnknownConfigurationMock(),
         default_fs = 'hdfs://c6401.ambari.apache.org:8020',
         hdfs_site = {u'a': u'b'},
@@ -68,7 +68,7 @@ class TestJobHistoryServer(RMFTestCase):
         user = 'hdfs',
         dfs_type = '',
         action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
-        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+        hadoop_conf_dir = '/etc/hadoop/conf',
     )
     self.assertResourceCalled('Execute', '/usr/hdp/current/spark-client/sbin/start-history-server.sh',
         environment = {'JAVA_HOME': u'/usr/jdk64/jdk1.7.0_45'},
@@ -126,7 +126,7 @@ class TestJobHistoryServer(RMFTestCase):
         hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
         default_fs= UnknownConfigurationMock(),
         hadoop_bin_dir='/usr/hdp/current/hadoop-client/bin',
-        hadoop_conf_dir='/usr/hdp/current/hadoop-client/conf',
+        hadoop_conf_dir='/etc/hadoop/conf',
         hdfs_site=UnknownConfigurationMock(),
         keytab=UnknownConfigurationMock(),
         kinit_path_local='/usr/bin/kinit',
@@ -176,7 +176,7 @@ class TestJobHistoryServer(RMFTestCase):
     self.assertResourceCalled('HdfsResource', '/user/spark',
         immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
         security_enabled = False,
-        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+        hadoop_bin_dir = '/usr/hdp/2.2.1.0-2067/hadoop/bin',
         keytab = UnknownConfigurationMock(),
         default_fs = 'hdfs://c6401.ambari.apache.org:8020',
         hdfs_site = {u'a': u'b'},
@@ -185,7 +185,7 @@ class TestJobHistoryServer(RMFTestCase):
         user = 'hdfs',
         dfs_type = '',
         owner = 'spark',
-        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+        hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'directory',
         action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
         mode = 0775,
@@ -193,7 +193,7 @@ class TestJobHistoryServer(RMFTestCase):
     self.assertResourceCalled('HdfsResource', None,
         immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
         security_enabled = False,
-        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+        hadoop_bin_dir = '/usr/hdp/2.2.1.0-2067/hadoop/bin',
         keytab = UnknownConfigurationMock(),
         default_fs = 'hdfs://c6401.ambari.apache.org:8020',
         hdfs_site = {u'a': u'b'},
@@ -202,7 +202,7 @@ class TestJobHistoryServer(RMFTestCase):
         user = 'hdfs',
         dfs_type = '',
         action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
-        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+        hadoop_conf_dir = '/etc/hadoop/conf',
     )
     self.assertResourceCalled('PropertiesFile', '/usr/hdp/current/spark-client/conf/spark-defaults.conf',
         owner = 'spark',
@@ -265,7 +265,7 @@ class TestJobHistoryServer(RMFTestCase):
         principal_name = UnknownConfigurationMock(),
         user = UnknownConfigurationMock(),
         owner = 'spark',
-        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+        hadoop_conf_dir = '/etc/hadoop/conf',
         dfs_type = '',
         type = 'directory',
         action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
@@ -283,7 +283,7 @@ class TestJobHistoryServer(RMFTestCase):
         user = UnknownConfigurationMock(),
         dfs_type = '',
         action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
-        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+        hadoop_conf_dir = '/etc/hadoop/conf',
     )
     self.assertResourceCalled('PropertiesFile', '/usr/hdp/current/spark-client/conf/spark-defaults.conf',
         owner = 'spark',
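
Each assertResourceCalled expectation above boils down to a keyword-by-keyword comparison against a recorded call. A simplified standard-library illustration of the idea, with a MagicMock standing in for the RMF resource (the real harness keeps an ordered queue of resources rather than a single call record):

    from unittest.mock import MagicMock

    HdfsResource = MagicMock()
    HdfsResource("/user/spark",
                 hadoop_bin_dir="/usr/hdp/2.2.1.0-2067/hadoop/bin",
                 hadoop_conf_dir="/etc/hadoop/conf",
                 owner="spark")

    # The expectations above reduce to checks like these on the call record.
    args, kwargs = HdfsResource.call_args
    assert args == ("/user/spark",)
    assert kwargs["hadoop_conf_dir"] == "/etc/hadoop/conf"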

http://git-wip-us.apache.org/repos/asf/ambari/blob/5433e479/ambari-server/src/test/python/stacks/2.3/MAHOUT/test_mahout_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.3/MAHOUT/test_mahout_client.py b/ambari-server/src/test/python/stacks/2.3/MAHOUT/test_mahout_client.py
index 96a2e9f..e136fa9 100644
--- a/ambari-server/src/test/python/stacks/2.3/MAHOUT/test_mahout_client.py
+++ b/ambari-server/src/test/python/stacks/2.3/MAHOUT/test_mahout_client.py
@@ -50,7 +50,7 @@ class TestMahoutClient(RMFTestCase):
       owner = "yarn",
       group = 'hadoop',
       mode = 0644,
-      conf_dir = '/usr/hdp/current/hadoop-client/conf',
+      conf_dir = '/usr/hdp/2.2.1.0-2067/hadoop/conf',
       configurations = self.getConfig()['configurations']['yarn-site'],
       configuration_attributes = self.getConfig()['configuration_attributes']['yarn-site']
     )

http://git-wip-us.apache.org/repos/asf/ambari/blob/5433e479/ambari-server/src/test/python/stacks/2.3/MAHOUT/test_mahout_service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.3/MAHOUT/test_mahout_service_check.py b/ambari-server/src/test/python/stacks/2.3/MAHOUT/test_mahout_service_check.py
index 7bda992..b80476c 100644
--- a/ambari-server/src/test/python/stacks/2.3/MAHOUT/test_mahout_service_check.py
+++ b/ambari-server/src/test/python/stacks/2.3/MAHOUT/test_mahout_service_check.py
@@ -45,7 +45,7 @@ class TestMahoutClient(RMFTestCase):
     self.assertResourceCalled('HdfsResource', '/user/ambari-qa',
                               immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
                               security_enabled = False,
-                              hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+                              hadoop_bin_dir = '/usr/hdp/2.2.1.0-2067/hadoop/bin',
                               keytab = UnknownConfigurationMock(),
                               kinit_path_local = '/usr/bin/kinit',
                               user = 'hdfs',
@@ -53,64 +53,64 @@ class TestMahoutClient(RMFTestCase):
                               mode = 0770,
                               owner = 'ambari-qa',
                               action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
-                              hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+                              hadoop_conf_dir = '/usr/hdp/2.2.1.0-2067/hadoop/conf',
                               type = 'directory',
                               )
     self.assertResourceCalled('HdfsResource', '/user/ambari-qa/mahoutsmokeoutput',
         immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
         security_enabled = False,
-        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+        hadoop_bin_dir = '/usr/hdp/2.2.1.0-2067/hadoop/bin',
         keytab = UnknownConfigurationMock(),
         kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
         dfs_type = '',
         action = ['delete_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
-        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+        hadoop_conf_dir = '/usr/hdp/2.2.1.0-2067/hadoop/conf',
         type = 'directory',
     )
     self.assertResourceCalled('HdfsResource', '/user/ambari-qa/mahoutsmokeinput',
         immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
         security_enabled = False,
-        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+        hadoop_bin_dir = '/usr/hdp/2.2.1.0-2067/hadoop/bin',
         keytab = UnknownConfigurationMock(),
         kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
         dfs_type = '',
         owner = 'ambari-qa',
-        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+        hadoop_conf_dir = '/usr/hdp/2.2.1.0-2067/hadoop/conf',
         type = 'directory',
         action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
     )
     self.assertResourceCalled('HdfsResource', '/user/ambari-qa/mahoutsmokeinput/sample-mahout-test.txt',
         immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
         security_enabled = False,
-        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+        hadoop_bin_dir = '/usr/hdp/2.2.1.0-2067/hadoop/bin',
         keytab = UnknownConfigurationMock(),
         kinit_path_local = '/usr/bin/kinit',
         source = '/tmp/sample-mahout-test.txt',
         user = 'hdfs',
         dfs_type = '',
         owner = 'ambari-qa',
-        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+        hadoop_conf_dir = '/usr/hdp/2.2.1.0-2067/hadoop/conf',
         type = 'file',
         action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
     )
     self.assertResourceCalled('HdfsResource', None,
         immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
         security_enabled = False,
-        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+        hadoop_bin_dir = '/usr/hdp/2.2.1.0-2067/hadoop/bin',
         keytab = UnknownConfigurationMock(),
         kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
         dfs_type = '',
         action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
-        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+        hadoop_conf_dir = '/usr/hdp/2.2.1.0-2067/hadoop/conf',
     )
     self.assertResourceCalled('Execute', 'mahout seqdirectory --input /user/ambari-qa/mahoutsmokeinput/'
                                          'sample-mahout-test.txt --output /user/ambari-qa/mahoutsmokeoutput/ '
                                          '--charset utf-8',
-                              environment = {'HADOOP_CONF_DIR': '/usr/hdp/current/hadoop-client/conf',
-                                             'HADOOP_HOME': '/usr/hdp/current/hadoop-client',
+                              environment = {'HADOOP_CONF_DIR': '/usr/hdp/2.2.1.0-2067/hadoop/conf',
+                                             'HADOOP_HOME': '/usr/hdp/2.2.1.0-2067/hadoop',
                                              'JAVA_HOME': u'/usr/jdk64/jdk1.7.0_45',
                                              'MAHOUT_HOME': '/usr/hdp/current/mahout-client'},
                               path = ['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'],
@@ -121,9 +121,9 @@ class TestMahoutClient(RMFTestCase):
     self.assertResourceCalled('ExecuteHadoop', 'fs -test -e /user/ambari-qa/mahoutsmokeoutput/_SUCCESS',
                               try_sleep = 6,
                               tries = 10,
-                              bin_dir = '/usr/hdp/current/hadoop-client/bin',
+                              bin_dir = '/usr/hdp/2.2.1.0-2067/hadoop/bin',
                               user = 'ambari-qa',
-                              conf_dir = '/usr/hdp/current/hadoop-client/conf',
+                              conf_dir = '/usr/hdp/2.2.1.0-2067/hadoop/conf',
                               )
     self.assertNoMoreResources()
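
At the process level, the Execute expectation above amounts to launching the Mahout smoke command with the versioned Hadoop environment merged in. A rough sketch (paths copied from the expectations; not how Ambari actually spawns the command):

    import os
    import subprocess

    env = dict(os.environ,
               HADOOP_CONF_DIR="/usr/hdp/2.2.1.0-2067/hadoop/conf",
               HADOOP_HOME="/usr/hdp/2.2.1.0-2067/hadoop",
               JAVA_HOME="/usr/jdk64/jdk1.7.0_45",
               MAHOUT_HOME="/usr/hdp/current/mahout-client")

    # Run mahout seqdirectory with the versioned environment; the output
    # directory is later probed with "fs -test -e .../_SUCCESS", as the
    # ExecuteHadoop expectation above shows.
    subprocess.call(["mahout", "seqdirectory",
                     "--input", "/user/ambari-qa/mahoutsmokeinput/sample-mahout-test.txt",
                     "--output", "/user/ambari-qa/mahoutsmokeoutput/",
                     "--charset", "utf-8"], env=env)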