Posted to commits@ambari.apache.org by jo...@apache.org on 2015/12/05 02:45:49 UTC

[1/2] ambari git commit: AMBARI-14220 - Express Upgrades Fail To Stop Services Because Of Destroyed Configurations (jonathanhurley)

Repository: ambari
Updated Branches:
  refs/heads/trunk dfb18d978 -> 357d196ef


http://git-wip-us.apache.org/repos/asf/ambari/blob/357d196e/ambari-server/src/test/python/stacks/2.1/STORM/test_storm_supervisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.1/STORM/test_storm_supervisor.py b/ambari-server/src/test/python/stacks/2.1/STORM/test_storm_supervisor.py
index d8c79ac..e5d8653 100644
--- a/ambari-server/src/test/python/stacks/2.1/STORM/test_storm_supervisor.py
+++ b/ambari-server/src/test/python/stacks/2.1/STORM/test_storm_supervisor.py
@@ -221,7 +221,7 @@ class TestStormSupervisor(TestStormBase):
                      call_mocks = [(0, None, ''), (0, None)],
                      mocks_dict = mocks_dict)
 
-    self.assertResourceCalled("Execute", ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'storm-client', '2.3.0.0-1234'), sudo=True)
+    self.assertResourceCalledIgnoreEarlier("Execute", ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'storm-client', '2.3.0.0-1234'), sudo=True)
     self.assertResourceCalled("Execute", ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'storm-supervisor', '2.3.0.0-1234'), sudo=True)
 
     self.assertEquals(1, mocks_dict['call'].call_count)
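
The assertions above (and in the remaining test files below) switch from
assertResourceCalled to assertResourceCalledIgnoreEarlier because the conf-select
changes in this commit can emit extra resources (for example, Link repairs of
/etc/<component>/conf) ahead of the hdp-select Execute, so that Execute is no
longer guaranteed to be the very next recorded resource. A minimal standalone
sketch of what such a "skip ahead until it matches" assertion could look like - a
hypothetical illustration, not the actual RMFTestCase implementation:

    def assert_resource_called_ignore_earlier(recorded, resource_type, name, **kwargs):
        """Pop earlier recorded resources until one matches resource_type and name,
        then compare its keyword arguments (hypothetical helper sketch)."""
        while recorded:
            r_type, r_name, r_kwargs = recorded.pop(0)
            if r_type == resource_type and r_name == name:
                assert r_kwargs == kwargs, "arguments differ: %r != %r" % (r_kwargs, kwargs)
                return
        raise AssertionError("%s %r was never called" % (resource_type, name))

    # e.g. assert_resource_called_ignore_earlier(recorded_resources, 'Execute',
    #   ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'storm-client', '2.3.0.0-1234'),
    #   sudo=True)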

http://git-wip-us.apache.org/repos/asf/ambari/blob/357d196e/ambari-server/src/test/python/stacks/2.1/STORM/test_storm_supervisor_prod.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.1/STORM/test_storm_supervisor_prod.py b/ambari-server/src/test/python/stacks/2.1/STORM/test_storm_supervisor_prod.py
index ff43e43..8017ec2 100644
--- a/ambari-server/src/test/python/stacks/2.1/STORM/test_storm_supervisor_prod.py
+++ b/ambari-server/src/test/python/stacks/2.1/STORM/test_storm_supervisor_prod.py
@@ -190,7 +190,7 @@ class TestStormSupervisor(TestStormBase):
                      call_mocks = [(0, None, ''), (0, None)],
                      mocks_dict = mocks_dict)
 
-    self.assertResourceCalled("Execute", ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'storm-client', '2.3.0.0-1234'), sudo=True)
+    self.assertResourceCalledIgnoreEarlier("Execute", ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'storm-client', '2.3.0.0-1234'), sudo=True)
     self.assertResourceCalled("Execute", ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'storm-supervisor', '2.3.0.0-1234'), sudo=True)
 
     self.assertEquals(1, mocks_dict['call'].call_count)

http://git-wip-us.apache.org/repos/asf/ambari/blob/357d196e/ambari-server/src/test/python/stacks/2.1/STORM/test_storm_ui_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.1/STORM/test_storm_ui_server.py b/ambari-server/src/test/python/stacks/2.1/STORM/test_storm_ui_server.py
index dc00139..51296c9 100644
--- a/ambari-server/src/test/python/stacks/2.1/STORM/test_storm_ui_server.py
+++ b/ambari-server/src/test/python/stacks/2.1/STORM/test_storm_ui_server.py
@@ -174,7 +174,7 @@ class TestStormUiServer(TestStormBase):
                      call_mocks = [(0, None, ''), (0, None)],
                      mocks_dict = mocks_dict)
 
-    self.assertResourceCalled("Execute", ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'storm-client', '2.3.0.0-1234'), sudo=True)
+    self.assertResourceCalledIgnoreEarlier("Execute", ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'storm-client', '2.3.0.0-1234'), sudo=True)
 
     self.assertEquals(1, mocks_dict['call'].call_count)
     self.assertEquals(1, mocks_dict['checked_call'].call_count)

http://git-wip-us.apache.org/repos/asf/ambari/blob/357d196e/ambari-server/src/test/python/stacks/2.1/TEZ/test_tez_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.1/TEZ/test_tez_client.py b/ambari-server/src/test/python/stacks/2.1/TEZ/test_tez_client.py
index de9e7cc..a23435b 100644
--- a/ambari-server/src/test/python/stacks/2.1/TEZ/test_tez_client.py
+++ b/ambari-server/src/test/python/stacks/2.1/TEZ/test_tez_client.py
@@ -108,7 +108,7 @@ class TestTezClient(RMFTestCase):
                        call_mocks = [(0, None, ''), (0, None, ''), (0, None, ''), (0, None, '')],
                        mocks_dict = mocks_dict)
 
-    self.assertResourceCalled('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'hadoop-client', version), sudo=True)
+    self.assertResourceCalledIgnoreEarlier('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'hadoop-client', version), sudo=True)
     self.assertNoMoreResources()
 
     self.assertEquals(2, mocks_dict['call'].call_count)

http://git-wip-us.apache.org/repos/asf/ambari/blob/357d196e/ambari-server/src/test/python/stacks/2.1/YARN/test_apptimelineserver.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.1/YARN/test_apptimelineserver.py b/ambari-server/src/test/python/stacks/2.1/YARN/test_apptimelineserver.py
index 6118cc8..f488509 100644
--- a/ambari-server/src/test/python/stacks/2.1/YARN/test_apptimelineserver.py
+++ b/ambari-server/src/test/python/stacks/2.1/YARN/test_apptimelineserver.py
@@ -412,7 +412,7 @@ class TestAppTimelineServer(RMFTestCase):
                        call_mocks = [(0, None, ''), (0, None, '')],
                        mocks_dict = mocks_dict)
 
-    self.assertResourceCalled('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'hadoop-yarn-timelineserver', version), sudo=True)
+    self.assertResourceCalledIgnoreEarlier('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'hadoop-yarn-timelineserver', version), sudo=True)
     self.assertNoMoreResources()
 
     self.assertEquals(1, mocks_dict['call'].call_count)

http://git-wip-us.apache.org/repos/asf/ambari/blob/357d196e/ambari-server/src/test/python/stacks/2.2/ACCUMULO/test_accumulo_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.2/ACCUMULO/test_accumulo_client.py b/ambari-server/src/test/python/stacks/2.2/ACCUMULO/test_accumulo_client.py
index f3f8d67..4f75f62 100644
--- a/ambari-server/src/test/python/stacks/2.2/ACCUMULO/test_accumulo_client.py
+++ b/ambari-server/src/test/python/stacks/2.2/ACCUMULO/test_accumulo_client.py
@@ -67,7 +67,7 @@ class TestAccumuloClient(RMFTestCase):
       call_mocks = [(0, None, ''), (0, None)],
       mocks_dict = mocks_dict)
 
-    self.assertResourceCalled('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'accumulo-client', version), sudo=True,)
+    self.assertResourceCalledIgnoreEarlier('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'accumulo-client', version), sudo=True,)
     self.assertNoMoreResources()
 
     self.assertEquals(1, mocks_dict['call'].call_count)

http://git-wip-us.apache.org/repos/asf/ambari/blob/357d196e/ambari-server/src/test/python/stacks/2.2/KAFKA/test_kafka_broker.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.2/KAFKA/test_kafka_broker.py b/ambari-server/src/test/python/stacks/2.2/KAFKA/test_kafka_broker.py
index 2259b47..1c782bd 100644
--- a/ambari-server/src/test/python/stacks/2.2/KAFKA/test_kafka_broker.py
+++ b/ambari-server/src/test/python/stacks/2.2/KAFKA/test_kafka_broker.py
@@ -134,7 +134,7 @@ class TestKafkaBroker(RMFTestCase):
                        call_mocks = [(0, None, ''), (0, None)],
                        mocks_dict = mocks_dict)
 
-    self.assertResourceCalled('Execute',
+    self.assertResourceCalledIgnoreEarlier('Execute',
                               ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'kafka-broker', version), sudo=True,)
     self.assertNoMoreResources()
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/357d196e/ambari-server/src/test/python/stacks/2.2/KNOX/test_knox_gateway.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.2/KNOX/test_knox_gateway.py b/ambari-server/src/test/python/stacks/2.2/KNOX/test_knox_gateway.py
index 4edb7eb..78fc07f 100644
--- a/ambari-server/src/test/python/stacks/2.2/KNOX/test_knox_gateway.py
+++ b/ambari-server/src/test/python/stacks/2.2/KNOX/test_knox_gateway.py
@@ -307,9 +307,7 @@ class TestKnoxGateway(RMFTestCase):
      '/var/lib/knox/data'),
         sudo = True,  tries = 3, try_sleep = 1,
     )
-    self.assertResourceCalled('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'knox-server', version),
-        sudo = True,
-    )
+    self.assertResourceCalledIgnoreEarlier('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'knox-server', version),sudo = True)
     self.assertResourceCalled('Execute', ('cp',
      '/tmp/knox-upgrade-backup/knox-conf-backup.tar',
      '/usr/hdp/current/knox-server/conf/knox-conf-backup.tar'),
@@ -379,9 +377,7 @@ class TestKnoxGateway(RMFTestCase):
      '/var/lib/knox/data'),
         sudo = True, tries = 3, try_sleep = 1,
     )
-    self.assertResourceCalled('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'knox-server', version),
-        sudo = True,
-    )
+    self.assertResourceCalledIgnoreEarlier('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'knox-server', version),sudo = True)
     self.assertResourceCalled('Execute', ('cp',
      '/tmp/knox-upgrade-backup/knox-conf-backup.tar',
      '/usr/hdp/current/knox-server/conf/knox-conf-backup.tar'),

http://git-wip-us.apache.org/repos/asf/ambari/blob/357d196e/ambari-server/src/test/python/stacks/2.2/RANGER/test_ranger_admin.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.2/RANGER/test_ranger_admin.py b/ambari-server/src/test/python/stacks/2.2/RANGER/test_ranger_admin.py
index 80bbbaf..4c7792d 100644
--- a/ambari-server/src/test/python/stacks/2.2/RANGER/test_ranger_admin.py
+++ b/ambari-server/src/test/python/stacks/2.2/RANGER/test_ranger_admin.py
@@ -211,7 +211,7 @@ class TestRangerAdmin(RMFTestCase):
                        call_mocks = [(0, None, ''), (0, None)],
                        mocks_dict = mocks_dict)
 
-    self.assertResourceCalled("Execute", ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'ranger-admin', '2.3.0.0-1234'), sudo=True)
+    self.assertResourceCalledIgnoreEarlier("Execute", ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'ranger-admin', '2.3.0.0-1234'), sudo=True)
 
     self.assertEquals(1, mocks_dict['call'].call_count)
     self.assertEquals(1, mocks_dict['checked_call'].call_count)

http://git-wip-us.apache.org/repos/asf/ambari/blob/357d196e/ambari-server/src/test/python/stacks/2.2/RANGER/test_ranger_usersync.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.2/RANGER/test_ranger_usersync.py b/ambari-server/src/test/python/stacks/2.2/RANGER/test_ranger_usersync.py
index 5cba731..2828983 100644
--- a/ambari-server/src/test/python/stacks/2.2/RANGER/test_ranger_usersync.py
+++ b/ambari-server/src/test/python/stacks/2.2/RANGER/test_ranger_usersync.py
@@ -148,7 +148,7 @@ class TestRangerUsersync(RMFTestCase):
     self.assertResourceCalled("Execute", ("/usr/bin/ranger-usersync-stop",),
                               environment = {'JAVA_HOME': u'/usr/jdk64/jdk1.7.0_67'},
                               sudo = True)
-    self.assertResourceCalled("Execute", ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'ranger-usersync', '2.3.0.0-1234'), sudo=True)
+    self.assertResourceCalledIgnoreEarlier("Execute", ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'ranger-usersync', '2.3.0.0-1234'), sudo=True)
 
     self.assertEquals(2, mocks_dict['call'].call_count)
     self.assertEquals(1, mocks_dict['checked_call'].call_count)

http://git-wip-us.apache.org/repos/asf/ambari/blob/357d196e/ambari-server/src/test/python/stacks/2.2/SLIDER/test_slider_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.2/SLIDER/test_slider_client.py b/ambari-server/src/test/python/stacks/2.2/SLIDER/test_slider_client.py
index 3d01993..de00077 100644
--- a/ambari-server/src/test/python/stacks/2.2/SLIDER/test_slider_client.py
+++ b/ambari-server/src/test/python/stacks/2.2/SLIDER/test_slider_client.py
@@ -140,8 +140,8 @@ class TestSliderClient(RMFTestCase):
                        call_mocks = [(0, None, ''), (0, None, ''), (0, None, ''), (0, None, '')],
                        mocks_dict = mocks_dict)
 
-    self.assertResourceCalled("Execute", ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'slider-client', '2.3.0.0-1234'), sudo=True)
-    self.assertResourceCalled("Execute", ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'hadoop-client', '2.3.0.0-1234'), sudo=True)
+    self.assertResourceCalledIgnoreEarlier("Execute", ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'slider-client', '2.3.0.0-1234'), sudo=True)
+    self.assertResourceCalledIgnoreEarlier("Execute", ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'hadoop-client', '2.3.0.0-1234'), sudo=True)
     self.assertNoMoreResources()
 
     self.assertEquals(2, mocks_dict['call'].call_count)

http://git-wip-us.apache.org/repos/asf/ambari/blob/357d196e/ambari-server/src/test/python/stacks/2.2/SPARK/test_job_history_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.2/SPARK/test_job_history_server.py b/ambari-server/src/test/python/stacks/2.2/SPARK/test_job_history_server.py
index a3319aa..d4fef77 100644
--- a/ambari-server/src/test/python/stacks/2.2/SPARK/test_job_history_server.py
+++ b/ambari-server/src/test/python/stacks/2.2/SPARK/test_job_history_server.py
@@ -302,7 +302,7 @@ class TestJobHistoryServer(RMFTestCase):
                        call_mocks = [(0, None, ''), (0, None)],
                        mocks_dict = mocks_dict)
 
-    self.assertResourceCalled('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'spark-historyserver', version), sudo=True)
+    self.assertResourceCalledIgnoreEarlier('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'spark-historyserver', version), sudo=True)
     self.assertNoMoreResources()
 
     self.assertEquals(1, mocks_dict['call'].call_count)

http://git-wip-us.apache.org/repos/asf/ambari/blob/357d196e/ambari-server/src/test/python/stacks/2.2/SPARK/test_spark_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.2/SPARK/test_spark_client.py b/ambari-server/src/test/python/stacks/2.2/SPARK/test_spark_client.py
index 385b2d7..841a4c0 100644
--- a/ambari-server/src/test/python/stacks/2.2/SPARK/test_spark_client.py
+++ b/ambari-server/src/test/python/stacks/2.2/SPARK/test_spark_client.py
@@ -140,7 +140,7 @@ class TestSparkClient(RMFTestCase):
                        call_mocks = [(0, None, ''), (0, None)],
                        mocks_dict = mocks_dict)
 
-    self.assertResourceCalled('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'spark-client', version), sudo=True)
+    self.assertResourceCalledIgnoreEarlier('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'spark-client', version), sudo=True)
     self.assertNoMoreResources()
 
     self.assertEquals(1, mocks_dict['call'].call_count)

http://git-wip-us.apache.org/repos/asf/ambari/blob/357d196e/ambari-server/src/test/python/stacks/2.3/MAHOUT/test_mahout_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.3/MAHOUT/test_mahout_client.py b/ambari-server/src/test/python/stacks/2.3/MAHOUT/test_mahout_client.py
index 06645a9..db8c0d9 100644
--- a/ambari-server/src/test/python/stacks/2.3/MAHOUT/test_mahout_client.py
+++ b/ambari-server/src/test/python/stacks/2.3/MAHOUT/test_mahout_client.py
@@ -103,9 +103,7 @@ class TestMahoutClient(RMFTestCase):
       call_mocks = itertools.cycle([(0, None, '')]),
       mocks_dict = mocks_dict)
 
-    self.assertResourceCalled('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'mahout-client', '2.3.0.0-1234'),
-        sudo = True,
-    )
+    self.assertResourceCalledIgnoreEarlier('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'mahout-client', '2.3.0.0-1234'),sudo = True)
     self.assertNoMoreResources()
 
     import sys

http://git-wip-us.apache.org/repos/asf/ambari/blob/357d196e/ambari-server/src/test/python/stacks/2.3/SPARK/test_spark_thrift_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.3/SPARK/test_spark_thrift_server.py b/ambari-server/src/test/python/stacks/2.3/SPARK/test_spark_thrift_server.py
index 1968f3b..22e2eda 100644
--- a/ambari-server/src/test/python/stacks/2.3/SPARK/test_spark_thrift_server.py
+++ b/ambari-server/src/test/python/stacks/2.3/SPARK/test_spark_thrift_server.py
@@ -165,7 +165,7 @@ class TestSparkThriftServer(RMFTestCase):
                        call_mocks = [(0, None, ''), (0, None)],
                        mocks_dict = mocks_dict)
 
-    self.assertResourceCalled('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'spark-thriftserver', version), sudo=True)
+    self.assertResourceCalledIgnoreEarlier('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'spark-thriftserver', version), sudo=True)
     self.assertNoMoreResources()
 
     self.assertEquals(1, mocks_dict['call'].call_count)


[2/2] ambari git commit: AMBARI-14220 - Express Upgrades Fail To Stop Services Because Of Destroyed Configurations (jonathanhurley)

Posted by jo...@apache.org.
AMBARI-14220 - Express Upgrades Fail To Stop Services Because Of Destroyed Configurations (jonathanhurley)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/357d196e
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/357d196e
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/357d196e

Branch: refs/heads/trunk
Commit: 357d196efcf19af956faff3f7051d16bdeb74263
Parents: dfb18d9
Author: Jonathan Hurley <jh...@hortonworks.com>
Authored: Fri Dec 4 15:44:52 2015 -0500
Committer: Jonathan Hurley <jh...@hortonworks.com>
Committed: Fri Dec 4 20:26:51 2015 -0500

----------------------------------------------------------------------
 .../libraries/functions/conf_select.py          | 193 ++++++++++++-------
 .../controller/AmbariActionExecutionHelper.java |  81 ++++----
 .../AmbariCustomCommandExecutionHelper.java     |   7 +-
 .../custom_actions/scripts/install_packages.py  |  52 ++++-
 .../custom_actions/scripts/ru_set_all.py        |  11 +-
 .../scripts/shared_initialization.py            |  80 +-------
 .../stacks/2.0.6/HBASE/test_hbase_client.py     |   6 +-
 .../stacks/2.0.6/HBASE/test_hbase_master.py     |   2 +-
 .../2.0.6/HBASE/test_hbase_regionserver.py      |   2 +-
 .../2.0.6/HBASE/test_phoenix_queryserver.py     |   2 +-
 .../python/stacks/2.0.6/HDFS/test_datanode.py   |   1 +
 .../stacks/2.0.6/HDFS/test_hdfs_client.py       |   4 +-
 .../stacks/2.0.6/HDFS/test_journalnode.py       |   1 +
 .../python/stacks/2.0.6/HDFS/test_nfsgateway.py |   1 +
 .../stacks/2.0.6/HIVE/test_hive_client.py       |   3 +-
 .../stacks/2.0.6/HIVE/test_hive_server.py       |   1 +
 .../stacks/2.0.6/HIVE/test_webhcat_server.py    |   2 +-
 .../python/stacks/2.0.6/PIG/test_pig_client.py  |   2 +-
 .../python/stacks/2.0.6/SQOOP/test_sqoop.py     |   1 +
 .../stacks/2.0.6/YARN/test_historyserver.py     |   2 +-
 .../stacks/2.0.6/YARN/test_mapreduce2_client.py |   2 +-
 .../stacks/2.0.6/YARN/test_nodemanager.py       |   2 +-
 .../stacks/2.0.6/YARN/test_resourcemanager.py   |   2 +-
 .../stacks/2.0.6/YARN/test_yarn_client.py       |   2 +-
 .../2.0.6/ZOOKEEPER/test_zookeeper_client.py    |   2 +-
 .../2.0.6/ZOOKEEPER/test_zookeeper_server.py    |   1 +
 .../hooks/after-INSTALL/test_after_install.py   | 192 +++++++++---------
 .../stacks/2.1/FALCON/test_falcon_client.py     |   2 +-
 .../stacks/2.1/FALCON/test_falcon_server.py     |   2 +-
 .../stacks/2.1/HIVE/test_hive_metastore.py      |   4 +
 .../stacks/2.1/STORM/test_storm_drpc_server.py  |   2 +-
 .../stacks/2.1/STORM/test_storm_nimbus.py       |   2 +-
 .../stacks/2.1/STORM/test_storm_nimbus_prod.py  |   2 +-
 .../stacks/2.1/STORM/test_storm_supervisor.py   |   2 +-
 .../2.1/STORM/test_storm_supervisor_prod.py     |   2 +-
 .../stacks/2.1/STORM/test_storm_ui_server.py    |   2 +-
 .../python/stacks/2.1/TEZ/test_tez_client.py    |   2 +-
 .../stacks/2.1/YARN/test_apptimelineserver.py   |   2 +-
 .../stacks/2.2/ACCUMULO/test_accumulo_client.py |   2 +-
 .../stacks/2.2/KAFKA/test_kafka_broker.py       |   2 +-
 .../python/stacks/2.2/KNOX/test_knox_gateway.py |   8 +-
 .../stacks/2.2/RANGER/test_ranger_admin.py      |   2 +-
 .../stacks/2.2/RANGER/test_ranger_usersync.py   |   2 +-
 .../stacks/2.2/SLIDER/test_slider_client.py     |   4 +-
 .../stacks/2.2/SPARK/test_job_history_server.py |   2 +-
 .../stacks/2.2/SPARK/test_spark_client.py       |   2 +-
 .../stacks/2.3/MAHOUT/test_mahout_client.py     |   4 +-
 .../2.3/SPARK/test_spark_thrift_server.py       |   2 +-
 48 files changed, 381 insertions(+), 330 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/357d196e/ambari-common/src/main/python/resource_management/libraries/functions/conf_select.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/conf_select.py b/ambari-common/src/main/python/resource_management/libraries/functions/conf_select.py
index 24ca151..633ab7c 100644
--- a/ambari-common/src/main/python/resource_management/libraries/functions/conf_select.py
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/conf_select.py
@@ -26,9 +26,14 @@ import hdp_select
 import subprocess
 
 from resource_management.core import shell
+from resource_management.libraries.functions.format import format
 from resource_management.libraries.script.script import Script
 from resource_management.core.logger import Logger
-from resource_management.core.resources.system import Directory, Link
+from resource_management.core.resources.system import Directory
+from resource_management.core.resources.system import Execute
+from resource_management.core.resources.system import Link
+from resource_management.core.shell import as_sudo
+
 
 PACKAGE_DIRS = {
   "accumulo": [
@@ -239,41 +244,30 @@ def select(stack_name, package, version, try_create=True):
 
   shell.checked_call(get_cmd("set-conf-dir", package, version), logoutput=False, quiet=False, sudo=True)
 
-  # Create the symbolic link using 'PACKAGE_DIRS' for the given package
-  # Starting with 2.3, we have sym links instead of flat directories.
-  # Eg: /etc/<service-name>/conf -> /etc/<service-name>/2.3.x.y-<version>/0
-  # But, in case of Express upgrade from HDP 2.1-> HDP 2.3 where we have
-  # deleted the /etc/<service-name>/conf directory, the above mentioned
-  # symlink needs to be created here.
+  # for consistency sake, we must ensure that the /etc/<component>/conf symlink exists and
+  # points to /usr/hdp/current/<component>/conf - this is because some people still prefer to
+  # use /etc/<component>/conf even though /usr/hdp is the "future"
   if package in PACKAGE_DIRS:
-    conf_dirs = PACKAGE_DIRS[package]
-    Logger.info("For package : {0}, DIRS = {1}".format(package, conf_dirs))
-    for dirInfo in conf_dirs:
-      if "conf_dir" in dirInfo and "current_dir" in dirInfo:
-        conf_dir = dirInfo["conf_dir"]
-        current_dir = dirInfo["current_dir"]
-        Logger.info("For package : {0}, Source dir: {1}, Dest dir: {2}".format(package, conf_dir, current_dir))
-        if os.path.exists(current_dir):
-          real_path_of_current_dir = os.path.realpath(current_dir)
-          normalized_conf_dir = (os.path.normpath(conf_dir)).strip()
-          normalized_current_dir = (os.path.normpath(real_path_of_current_dir)).strip()
-          Logger.info("Normalized Conf Dir : {0}, Normalized Current Dir : {1}".format(normalized_conf_dir, normalized_current_dir))
-          if os.path.isdir(normalized_current_dir) and normalized_current_dir != normalized_conf_dir:
-            if not os.path.isdir(normalized_conf_dir) and not os.path.islink(normalized_conf_dir):
-              Link(normalized_conf_dir,
-                   to=normalized_current_dir)
-              Logger.info("{0} directory doesn't exist. Created Symlink : {1} -> {2}".format(normalized_conf_dir, normalized_conf_dir, normalized_current_dir))
-              return
-            # In case, 'normalized_conf_dir' does have a symlink and it's not the one mentioned in 'PACKAGE_DIRS',
-            # we remove the symlink and make it point to correct symlink.
-            if os.path.islink(normalized_conf_dir) and os.readlink(normalized_conf_dir) != normalized_current_dir:
-              Logger.info("{0} exists and points to incorrect path {1}".format(normalized_conf_dir, os.readlink(normalized_conf_dir)))
-              Link(normalized_conf_dir,
-                   action="delete")
-              Logger.info("Removed existing symlink for {0}".format(normalized_conf_dir))
-              Link(normalized_conf_dir,
-                   to=normalized_current_dir)
-              Logger.info("Created Symlink : {0} -> {1}".format(normalized_conf_dir, normalized_current_dir))
+    Logger.info("Ensuring that {0} has the correct symlink structure".format(package))
+
+    directory_list = PACKAGE_DIRS[package]
+    for directory_structure in directory_list:
+      conf_dir = directory_structure["conf_dir"]
+      current_dir = directory_structure["current_dir"]
+
+      # if /etc/<component>/conf is not a symlink, we need to change it
+      if not os.path.islink(conf_dir):
+        # if it exists, try to back it up
+        if os.path.exists(conf_dir):
+          parent_directory = os.path.dirname(conf_dir)
+          conf_install_dir = os.path.join(parent_directory, "conf.backup")
+
+          Execute(("cp", "-R", "-p", conf_dir, conf_install_dir),
+            not_if = format("test -e {conf_install_dir}"), sudo = True)
+
+          Directory(conf_dir, action="delete")
+
+        Link(conf_dir, to = current_dir)
 
 
 def get_hadoop_conf_dir(force_latest_on_upgrade=False):
@@ -322,39 +316,106 @@ def get_hadoop_conf_dir(force_latest_on_upgrade=False):
   return hadoop_conf_dir
 
 
-def create_config_links(stack_id, stack_version):
+def convert_conf_directories_to_symlinks(package, version, dirs, skip_existing_links=True, link_to_conf_install=False):
+
   """
-  Creates config links
-  stack_id:  stack id, ie HDP-2.3
-  stack_version:  version to set, ie 2.3.0.0-1234
+  Assumes HDP 2.3+, moves around directories and creates the conf symlink for the given package.
+  If the package does not exist, then no work is performed.
+
+  - Creates a /etc/<component>/conf.backup directory
+  - Copies all configs from /etc/<component>/conf to conf.backup
+  - Removes /etc/<component>/conf
+  - Creates /etc/<component>/<version>/0
+  - Creates /usr/hdp/current/<component>-client/conf -> /etc/<component>/<version>/0
+  - Links /etc/<component>/conf to <something>
+  -- /etc/<component>/conf -> /usr/hdp/current/[component]-client/conf
+  -- /etc/<component>/conf -> /etc/<component>/conf.backup
+
+  :param package: the package to create symlinks for (zookeeper, falcon, etc)
+  :param version: the version number to use with conf-select (2.3.0.0-1234)
+  :param dirs: the directories associated with the package (from PACKAGE_DIRS)
+  :param skip_existing_links: True to not do any work if already a symlink
+  :param link_to_conf_install:
   """
-  if stack_id is None:
-    Logger.info("Cannot create config links when stack_id is not defined")
-    return
-  args = stack_id.upper().split('-')
-  if len(args) != 2:
-    Logger.info("Unrecognized stack id {0}".format(stack_id))
+  bad_dirs = []
+  for dir_def in dirs:
+    if not os.path.exists(dir_def['conf_dir']):
+      bad_dirs.append(dir_def['conf_dir'])
+
+  if len(bad_dirs) > 0:
+    Logger.info("Skipping {0} as it does not exist.".format(",".join(bad_dirs)))
     return
-  if args[0] != "HDP":
-    Logger.info("Unrecognized stack name {0}".format(args[0]))
-  if version.compare_versions(version.format_hdp_stack_version(args[1]), "2.3.0.0") < 0:
-    Logger.info("Cannot link configs unless HDP-2.3 or higher")
+
+  # existing links should be skipped since we assume there's no work to do
+  if skip_existing_links:
+    bad_dirs = []
+    for dir_def in dirs:
+      # check if conf is a link already
+      old_conf = dir_def['conf_dir']
+      if os.path.islink(old_conf):
+        Logger.info("{0} is already link to {1}".format(old_conf, os.path.realpath(old_conf)))
+        bad_dirs.append(old_conf)
+
+  if len(bad_dirs) > 0:
     return
-  for k, v in PACKAGE_DIRS.iteritems():
-    dirs = create(args[0], k, stack_version, dry_run = True)
-    if 0 == len(dirs):
-      Logger.debug("Package {0} is not installed".format(k))
-    else:
-      need = False
-      for new_conf_dir in dirs:
-        if not os.path.exists(new_conf_dir):
-          need = True
-
-      if need:
-        Logger.info("Creating conf dirs {0} for {1}".format(",".join(dirs), k))
-        try:
-          select(args[0], k, stack_version)
-        except Exception, err:
-          # don't ruin someone's day
-          Logger.logger.exception("'conf-select set' failed to link '{0}'. Error: {1}".format(k, str(err)))
+
+  # make backup dir and copy everything in case configure() was called after install()
+  for dir_def in dirs:
+    old_conf = dir_def['conf_dir']
+    old_parent = os.path.abspath(os.path.join(old_conf, os.pardir))
+    conf_install_dir = os.path.join(old_parent, "conf.backup")
+    Execute(("cp", "-R", "-p", old_conf, conf_install_dir),
+      not_if = format("test -e {conf_install_dir}"), sudo = True)
+
+  # we're already in the HDP stack
+  versioned_confs = create("HDP", package, version, dry_run = True)
+
+  Logger.info("New conf directories: {0}".format(", ".join(versioned_confs)))
+
+  need_dirs = []
+  for d in versioned_confs:
+    if not os.path.exists(d):
+      need_dirs.append(d)
+
+  if len(need_dirs) > 0:
+    create("HDP", package, version)
+
+    # find the matching definition and back it up (not the most efficient way) ONLY if there is more than one directory
+    if len(dirs) > 1:
+      for need_dir in need_dirs:
+        for dir_def in dirs:
+          if 'prefix' in dir_def and need_dir.startswith(dir_def['prefix']):
+            old_conf = dir_def['conf_dir']
+            versioned_conf = need_dir
+            Execute(as_sudo(["cp", "-R", "-p", os.path.join(old_conf, "*"), versioned_conf], auto_escape=False),
+              only_if = format("ls {old_conf}/*"))
+    elif 1 == len(dirs) and 1 == len(need_dirs):
+      old_conf = dirs[0]['conf_dir']
+      versioned_conf = need_dirs[0]
+      Execute(as_sudo(["cp", "-R", "-p", os.path.join(old_conf, "*"), versioned_conf], auto_escape=False),
+        only_if = format("ls {old_conf}/*"))
+
+
+  # make /usr/hdp/[version]/[component]/conf point to the versioned config.
+  # /usr/hdp/current is already set
+  try:
+    select("HDP", package, version)
+
+    # no more references to /etc/[component]/conf
+    for dir_def in dirs:
+      new_symlink = dir_def['conf_dir']
+
+      # remove new_symlink to pave the way, but only if it's a directory
+      if not os.path.islink(new_symlink):
+        Directory(new_symlink, action="delete")
+
+      # link /etc/[component]/conf -> /usr/hdp/current/[component]-client/conf
+      if link_to_conf_install:
+        Link(new_symlink, to = conf_install_dir)
+      else:
+        Link(new_symlink, to = dir_def['current_dir'])
+  except Exception, e:
+    Logger.warning("Could not select the directory: {0}".format(e.message))
+
+  # should conf.backup be removed?
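
Both select() above and the new convert_conf_directories_to_symlinks() follow the
same backup-then-link pattern for /etc/<component>/conf: copy the real directory
to a sibling conf.backup exactly once, delete it, and replace it with a symlink.
A plain-Python approximation of that step, using os/shutil in place of the
resource_management Execute/Directory/Link resources (a simplified sketch with
illustrative paths, not the code this commit installs):

    import os
    import shutil

    def ensure_conf_symlink(conf_dir, current_dir):
        """Back up a real conf_dir to <parent>/conf.backup once, remove it, and
        replace it with a symlink to current_dir."""
        if os.path.islink(conf_dir):
            return  # already a link; select() leaves it alone
        if os.path.exists(conf_dir):
            backup = os.path.join(os.path.dirname(conf_dir), "conf.backup")
            if not os.path.exists(backup):       # mirrors not_if = "test -e {conf_install_dir}"
                shutil.copytree(conf_dir, backup)  # roughly "cp -R -p"
            shutil.rmtree(conf_dir)
        os.symlink(current_dir, conf_dir)

    # e.g. ensure_conf_symlink("/etc/hadoop/conf", "/usr/hdp/current/hadoop-client/conf")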
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/357d196e/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariActionExecutionHelper.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariActionExecutionHelper.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariActionExecutionHelper.java
index d299602..f137996 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariActionExecutionHelper.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariActionExecutionHelper.java
@@ -18,10 +18,20 @@
 
 package org.apache.ambari.server.controller;
 
-import com.google.gson.JsonArray;
-import com.google.gson.JsonObject;
-import com.google.inject.Inject;
-import com.google.inject.Singleton;
+import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.COMMAND_TIMEOUT;
+import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.COMPONENT_CATEGORY;
+import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.SCRIPT;
+import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.SCRIPT_TYPE;
+import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.STACK_NAME;
+import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.STACK_VERSION;
+
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.TreeMap;
+
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.ObjectNotFoundException;
 import org.apache.ambari.server.Role;
@@ -52,18 +62,10 @@ import org.apache.commons.lang.StringUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.TreeMap;
-
-import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.COMMAND_TIMEOUT;
-import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.COMPONENT_CATEGORY;
-import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.SCRIPT;
-import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.SCRIPT_TYPE;
+import com.google.gson.JsonArray;
+import com.google.gson.JsonObject;
+import com.google.inject.Inject;
+import com.google.inject.Singleton;
 
 /**
  * Helper class containing logic to process custom action execution requests
@@ -470,28 +472,37 @@ public class AmbariActionExecutionHelper {
   * */
 
   private void addRepoInfoToHostLevelParams(Cluster cluster, Map<String, String> hostLevelParams, String hostName) throws AmbariException {
-    if (cluster != null) {
-      JsonObject rootJsonObject = new JsonObject();
-      JsonArray repositories = new JsonArray();
-      ClusterVersionEntity clusterVersionEntity = clusterVersionDAO.findByClusterAndStateCurrent(cluster.getClusterName());
-      if (clusterVersionEntity != null && clusterVersionEntity.getRepositoryVersion() != null) {
-        String hostOsFamily = clusters.getHost(hostName).getOsFamily();
-        for (OperatingSystemEntity operatingSystemEntity : clusterVersionEntity.getRepositoryVersion().getOperatingSystems()) {
-          // ostype in OperatingSystemEntity it's os family. That should be fixed in OperatingSystemEntity.
-          if (operatingSystemEntity.getOsType().equals(hostOsFamily)) {
-            for (RepositoryEntity repositoryEntity : operatingSystemEntity.getRepositories()) {
-              JsonObject repositoryInfo = new JsonObject();
-              repositoryInfo.addProperty("base_url", repositoryEntity.getBaseUrl());
-              repositoryInfo.addProperty("repo_name", repositoryEntity.getName());
-              repositoryInfo.addProperty("repo_id", repositoryEntity.getRepositoryId());
-
-              repositories.add(repositoryInfo);
-            }
-            rootJsonObject.add("repositories", repositories);
+    if (null == cluster) {
+      return;
+    }
+
+    JsonObject rootJsonObject = new JsonObject();
+    JsonArray repositories = new JsonArray();
+    ClusterVersionEntity clusterVersionEntity = clusterVersionDAO.findByClusterAndStateCurrent(
+        cluster.getClusterName());
+    if (clusterVersionEntity != null && clusterVersionEntity.getRepositoryVersion() != null) {
+      String hostOsFamily = clusters.getHost(hostName).getOsFamily();
+      for (OperatingSystemEntity operatingSystemEntity : clusterVersionEntity.getRepositoryVersion().getOperatingSystems()) {
+        // ostype in OperatingSystemEntity it's os family. That should be fixed
+        // in OperatingSystemEntity.
+        if (operatingSystemEntity.getOsType().equals(hostOsFamily)) {
+          for (RepositoryEntity repositoryEntity : operatingSystemEntity.getRepositories()) {
+            JsonObject repositoryInfo = new JsonObject();
+            repositoryInfo.addProperty("base_url", repositoryEntity.getBaseUrl());
+            repositoryInfo.addProperty("repo_name", repositoryEntity.getName());
+            repositoryInfo.addProperty("repo_id", repositoryEntity.getRepositoryId());
+
+            repositories.add(repositoryInfo);
           }
+          rootJsonObject.add("repositories", repositories);
         }
       }
-      hostLevelParams.put("repo_info", rootJsonObject.toString());
     }
+
+    hostLevelParams.put("repo_info", rootJsonObject.toString());
+
+    StackId stackId = cluster.getCurrentStackVersion();
+    hostLevelParams.put(STACK_NAME, stackId.getStackName());
+    hostLevelParams.put(STACK_VERSION, stackId.getStackVersion());
   }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/357d196e/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java
index 69709e5..2ed8f9d 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java
@@ -347,9 +347,11 @@ public class AmbariCustomCommandExecutionHelper {
       Map<String, String> hostLevelParams = new TreeMap<String, String>();
 
       hostLevelParams.put(CUSTOM_COMMAND, commandName);
+
       // Set parameters required for re-installing clients on restart
-      hostLevelParams.put(REPO_INFO, getRepoInfo
-        (cluster, host));
+      hostLevelParams.put(REPO_INFO, getRepoInfo(cluster, host));
+      hostLevelParams.put(STACK_NAME, stackId.getStackName());
+      hostLevelParams.put(STACK_VERSION, stackId.getStackVersion());
 
       Set<String> userSet = configHelper.getPropertyValuesWithPropertyType(stackId, PropertyType.USER, cluster);
       String userList = gson.toJson(userSet);
@@ -1141,6 +1143,7 @@ public class AmbariCustomCommandExecutionHelper {
         clusterVersionEntity = clusterVersionEntityList.iterator().next();
       }
     }
+
     if (clusterVersionEntity != null) {
       hostLevelParams.put("current_version", clusterVersionEntity.getRepositoryVersion().getVersion());
     }

http://git-wip-us.apache.org/repos/asf/ambari/blob/357d196e/ambari-server/src/main/resources/custom_actions/scripts/install_packages.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/custom_actions/scripts/install_packages.py b/ambari-server/src/main/resources/custom_actions/scripts/install_packages.py
index 0170806..526101e 100644
--- a/ambari-server/src/main/resources/custom_actions/scripts/install_packages.py
+++ b/ambari-server/src/main/resources/custom_actions/scripts/install_packages.py
@@ -78,6 +78,13 @@ class InstallPackages(Script):
       package_list = json.loads(config['commandParams']['package_list'])
       stack_id = config['commandParams']['stack_id']
 
+    # current stack information
+    self.current_hdp_stack_version = None
+    if 'stack_version' in config['hostLevelParams']:
+      current_stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
+      self.current_hdp_stack_version = format_hdp_stack_version(current_stack_version_unformatted)
+
+
     stack_name = None
     self.stack_root_folder = None
     if stack_id and "-" in stack_id:
@@ -137,10 +144,12 @@ class InstallPackages(Script):
     self.old_versions = get_hdp_versions(self.stack_root_folder)
 
     try:
+      is_package_install_successful = False
       ret_code = self.install_packages(package_list)
       if ret_code == 0:
         self.structured_output['package_installation_result'] = 'SUCCESS'
         self.put_structured_out(self.structured_output)
+        is_package_install_successful = True
       else:
         num_errors += 1
     except Exception, err:
@@ -151,10 +160,45 @@ class InstallPackages(Script):
     if num_errors > 0:
       raise Fail("Failed to distribute repositories/install packages")
 
-    if 'package_installation_result' in self.structured_output and \
-      'actual_version' in self.structured_output and \
-      self.structured_output['package_installation_result'] == 'SUCCESS':
-      conf_select.create_config_links(stack_id, self.structured_output['actual_version'])
+    # if installing a version of HDP that needs some symlink love, then create them
+    if is_package_install_successful and 'actual_version' in self.structured_output:
+      self._create_config_links_if_necessary(stack_id, self.structured_output['actual_version'])
+
+
+  def _create_config_links_if_necessary(self, stack_id, stack_version):
+    """
+    Sets up the required structure for /etc/<component>/conf symlinks and /usr/hdp/current
+    configuration symlinks IFF the current stack is < HDP 2.3+ and the new stack is >= HDP 2.3
+
+    stack_id:  stack id, ie HDP-2.3
+    stack_version:  version to set, ie 2.3.0.0-1234
+    """
+    if stack_id is None:
+      Logger.info("Cannot create config links when stack_id is not defined")
+      return
+
+    args = stack_id.upper().split('-')
+    if len(args) != 2:
+      Logger.info("Unrecognized stack id {0}, cannot create config links".format(stack_id))
+      return
+
+    if args[0] != "HDP":
+      Logger.info("Unrecognized stack name {0}, cannot create config links".format(args[0]))
+
+    if version.compare_versions(version.format_hdp_stack_version(args[1]), "2.3.0.0") < 0:
+      Logger.info("Configuration symlinks are not needed for {0}, only HDP-2.3+".format(stack_version))
+      return
+
+    # if already on HDP 2.3, then there's nothing to do in terms of linking configs
+    if self.current_hdp_stack_version and compare_versions(self.current_hdp_stack_version, '2.3') >= 0:
+      Logger.info("The current cluster stack of {0} does not require linking configurations".format(stack_version))
+      return
+
+    # link configs for all known packages
+    for package_name, directories in conf_select.PACKAGE_DIRS.iteritems():
+      conf_select.convert_conf_directories_to_symlinks(package_name, stack_version, directories,
+        skip_existing_links = False, link_to_conf_install = True)
+
 
   def compute_actual_version(self):
     """

http://git-wip-us.apache.org/repos/asf/ambari/blob/357d196e/ambari-server/src/main/resources/custom_actions/scripts/ru_set_all.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/custom_actions/scripts/ru_set_all.py b/ambari-server/src/main/resources/custom_actions/scripts/ru_set_all.py
index 7bfd820..3414c22 100644
--- a/ambari-server/src/main/resources/custom_actions/scripts/ru_set_all.py
+++ b/ambari-server/src/main/resources/custom_actions/scripts/ru_set_all.py
@@ -154,14 +154,19 @@ def link_config(old_conf, link_conf):
   :old_conf: the old config directory, ie /etc/[component]/conf
   :link_conf: the new target for the config directory, ie /usr/hdp/current/[component-dir]/conf
   """
-  if not os.path.exists(old_conf):
-    Logger.debug("Skipping {0}; it does not exist".format(old_conf))
+  # if the link exists but is wrong, then change it
+  if os.path.islink(old_conf) and os.path.realpath(old_conf) != link_conf:
+    Link(old_conf, to = link_conf)
     return
-  
+
   if os.path.islink(old_conf):
     Logger.debug("Skipping {0}; it is already a link".format(old_conf))
     return
 
+  if not os.path.exists(old_conf):
+    Logger.debug("Skipping {0}; it does not exist".format(old_conf))
+    return
+
   old_parent = os.path.abspath(os.path.join(old_conf, os.pardir))
 
   Logger.info("Linking {0} to {1}".format(old_conf, link_conf))

http://git-wip-us.apache.org/repos/asf/ambari/blob/357d196e/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/shared_initialization.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/shared_initialization.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/shared_initialization.py
index 3961897..d909926 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/shared_initialization.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/shared_initialization.py
@@ -21,7 +21,6 @@ import shutil
 
 import ambari_simplejson as json
 from resource_management.core.logger import Logger
-from resource_management.core.resources.system import Directory, Link
 from resource_management.core.resources.system import Execute
 from resource_management.core.shell import as_sudo
 from resource_management.libraries.functions import conf_select
@@ -89,81 +88,4 @@ def link_configs(struct_out_file):
     return
 
   for k, v in conf_select.PACKAGE_DIRS.iteritems():
-    _link_configs(k, json_version, v)
-
-def _link_configs(package, version, dirs):
-  """
-  Link a specific package's configuration directory
-  """
-  bad_dirs = []
-  for dir_def in dirs:
-    if not os.path.exists(dir_def['conf_dir']):
-      bad_dirs.append(dir_def['conf_dir'])
-
-  if len(bad_dirs) > 0:
-    Logger.debug("Skipping {0} as it does not exist.".format(",".join(bad_dirs)))
-    return
-
-  bad_dirs = []
-  for dir_def in dirs:
-    # check if conf is a link already
-    old_conf = dir_def['conf_dir']
-    if os.path.islink(old_conf):
-      Logger.debug("{0} is a link to {1}".format(old_conf, os.path.realpath(old_conf)))
-      bad_dirs.append(old_conf)
-
-  if len(bad_dirs) > 0:
-    return
-
-  # make backup dir and copy everything in case configure() was called after install()
-  for dir_def in dirs:
-    old_conf = dir_def['conf_dir']
-    old_parent = os.path.abspath(os.path.join(old_conf, os.pardir))
-    old_conf_copy = os.path.join(old_parent, "conf.install")
-    Execute(("cp", "-R", "-p", old_conf, old_conf_copy),
-      not_if = format("test -e {old_conf_copy}"), sudo = True)
-
-  # we're already in the HDP stack
-  versioned_confs = conf_select.create("HDP", package, version, dry_run = True)
-
-  Logger.info("New conf directories: {0}".format(", ".join(versioned_confs)))
-
-  need_dirs = []
-  for d in versioned_confs:
-    if not os.path.exists(d):
-      need_dirs.append(d)
-
-  if len(need_dirs) > 0:
-    conf_select.create("HDP", package, version)
-
-    # find the matching definition and back it up (not the most efficient way) ONLY if there is more than one directory
-    if len(dirs) > 1:
-      for need_dir in need_dirs:
-        for dir_def in dirs:
-          if 'prefix' in dir_def and need_dir.startswith(dir_def['prefix']):
-            old_conf = dir_def['conf_dir']
-            versioned_conf = need_dir
-            Execute(as_sudo(["cp", "-R", "-p", os.path.join(old_conf, "*"), versioned_conf], auto_escape=False),
-              only_if = format("ls {old_conf}/*"))
-    elif 1 == len(dirs) and 1 == len(need_dirs):
-      old_conf = dirs[0]['conf_dir']
-      versioned_conf = need_dirs[0]
-      Execute(as_sudo(["cp", "-R", "-p", os.path.join(old_conf, "*"), versioned_conf], auto_escape=False),
-        only_if = format("ls {old_conf}/*"))
-
-
-  # make /usr/hdp/[version]/[component]/conf point to the versioned config.
-  # /usr/hdp/current is already set
-  try:
-    conf_select.select("HDP", package, version)
-
-    # no more references to /etc/[component]/conf
-    for dir_def in dirs:
-      Directory(dir_def['conf_dir'], action="delete")
-
-      # link /etc/[component]/conf -> /usr/hdp/current/[component]-client/conf
-      Link(dir_def['conf_dir'], to = dir_def['current_dir'])
-  except Exception, e:
-    Logger.warning("Could not select the directory: {0}".format(e.message))
-
-  # should conf.install be removed?
+    conf_select.convert_conf_directories_to_symlinks(k, json_version, v)
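
With _link_configs() removed, the after-INSTALL hook and install_packages.py now
share one implementation in conf_select. A minimal usage sketch of the new call
path (json_version shown with an illustrative value; note that install_packages.py
passes skip_existing_links=False and link_to_conf_install=True instead of the
defaults used here):

    from resource_management.libraries.functions import conf_select

    json_version = '2.3.0.0-1234'  # stack version reported by the install
    for package, directories in conf_select.PACKAGE_DIRS.iteritems():
        conf_select.convert_conf_directories_to_symlinks(package, json_version, directories)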

http://git-wip-us.apache.org/repos/asf/ambari/blob/357d196e/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_client.py b/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_client.py
index 3d55c31..a1c762a 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_client.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_client.py
@@ -262,9 +262,9 @@ class TestHBaseClient(RMFTestCase):
                        call_mocks = [(0, None, ''), (0, None, ''), (0, None, ''), (0, None, '')],
                        mocks_dict = mocks_dict)
 
-    self.assertResourceCalled('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'hbase-client', version), sudo=True)
-    self.assertResourceCalled('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'phoenix-client', version), sudo=True)
-    self.assertResourceCalled('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'hadoop-client', version), sudo=True)
+    self.assertResourceCalledIgnoreEarlier('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'hbase-client', version), sudo=True)
+    self.assertResourceCalledIgnoreEarlier('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'phoenix-client', version), sudo=True)
+    self.assertResourceCalledIgnoreEarlier('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'hadoop-client', version), sudo=True)
 
     self.assertEquals(3, mocks_dict['call'].call_count)
     self.assertEquals(6, mocks_dict['checked_call'].call_count)

http://git-wip-us.apache.org/repos/asf/ambari/blob/357d196e/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_master.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_master.py b/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_master.py
index 2d422d4..3f6096c 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_master.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_master.py
@@ -822,7 +822,7 @@ class TestHBaseMaster(RMFTestCase):
                        call_mocks = [(0, None, ''), (0, None, ''), (0, None, ''), (0, None, '')],
                        mocks_dict = mocks_dict)
 
-    self.assertResourceCalled('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'hbase-master', version), sudo=True)
+    self.assertResourceCalledIgnoreEarlier('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'hbase-master', version), sudo=True)
 
     self.assertEquals(1, mocks_dict['call'].call_count)
     self.assertEquals(3, mocks_dict['checked_call'].call_count)

http://git-wip-us.apache.org/repos/asf/ambari/blob/357d196e/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_regionserver.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_regionserver.py b/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_regionserver.py
index 3ec3b83..224d81d 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_regionserver.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_regionserver.py
@@ -722,7 +722,7 @@ class TestHbaseRegionServer(RMFTestCase):
                        call_mocks = [(0, None, ''), (0, None), (0, None), (0, None)],
                        mocks_dict = mocks_dict)
 
-    self.assertResourceCalled('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'hbase-regionserver', version), sudo=True)
+    self.assertResourceCalledIgnoreEarlier('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'hbase-regionserver', version), sudo=True)
 
     self.assertEquals(1, mocks_dict['call'].call_count)
     self.assertEquals(3, mocks_dict['checked_call'].call_count)

http://git-wip-us.apache.org/repos/asf/ambari/blob/357d196e/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_phoenix_queryserver.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_phoenix_queryserver.py b/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_phoenix_queryserver.py
index 56900c8..3260715 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_phoenix_queryserver.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_phoenix_queryserver.py
@@ -455,6 +455,6 @@ class TestPhoenixQueryServer(RMFTestCase):
         mode = 0755,
         cd_access = 'a',
     )
-    self.assertResourceCalled('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'phoenix-server', '2.3.0.0-1234'), sudo=True)
+    self.assertResourceCalledIgnoreEarlier('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'phoenix-server', '2.3.0.0-1234'), sudo=True)
 
     self.assertNoMoreResources()

http://git-wip-us.apache.org/repos/asf/ambari/blob/357d196e/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py
index c94948c..8afb104 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py
@@ -461,6 +461,7 @@ class TestDatanode(RMFTestCase):
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
                        call_mocks = [(0, None, ''), (0, None)],
                        mocks_dict = mocks_dict)
+    self.assertResourceCalled('Link', ('/etc/hadoop/conf'), to='/usr/hdp/current/hadoop-client/conf')
     self.assertResourceCalled('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'hadoop-hdfs-datanode', version), sudo=True,)
 
     self.assertNoMoreResources()

http://git-wip-us.apache.org/repos/asf/ambari/blob/357d196e/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_hdfs_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_hdfs_client.py b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_hdfs_client.py
index eade3b9..1039a2d 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_hdfs_client.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_hdfs_client.py
@@ -204,7 +204,7 @@ class Test(RMFTestCase):
                        call_mocks = [(0, None, ''), (0, None)],
                        mocks_dict = mocks_dict)
 
-    self.assertResourceCalled('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'hadoop-client', version), sudo=True,)
+    self.assertResourceCalledIgnoreEarlier('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'hadoop-client', version), sudo=True,)
     self.assertNoMoreResources()
 
     self.assertEquals(1, mocks_dict['call'].call_count)
@@ -228,5 +228,5 @@ class Test(RMFTestCase):
                        config_dict = json_content,
                        hdp_stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES)
-    self.assertResourceCalled('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'hadoop-client', version), sudo=True,)
+    self.assertResourceCalledIgnoreEarlier('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'hadoop-client', version), sudo=True,)
     self.assertNoMoreResources()

http://git-wip-us.apache.org/repos/asf/ambari/blob/357d196e/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_journalnode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_journalnode.py b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_journalnode.py
index f21f3dd..f8400a1 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_journalnode.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_journalnode.py
@@ -481,6 +481,7 @@ class TestJournalnode(RMFTestCase):
                        call_mocks = [(0, None, ''), (0, None)],
                        mocks_dict = mocks_dict)
 
+    self.assertResourceCalled('Link', ('/etc/hadoop/conf'), to='/usr/hdp/current/hadoop-client/conf')
     self.assertResourceCalled('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'hadoop-hdfs-journalnode', version), sudo=True,)
     self.assertNoMoreResources()
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/357d196e/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_nfsgateway.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_nfsgateway.py b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_nfsgateway.py
index 47cb378..e744219 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_nfsgateway.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_nfsgateway.py
@@ -388,6 +388,7 @@ class TestNFSGateway(RMFTestCase):
                        hdp_stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
                        call_mocks = [(0, None, ''), (0, None), (0, None), (0, None)])
+    self.assertResourceCalled('Link', ('/etc/hadoop/conf'), to='/usr/hdp/current/hadoop-client/conf')
     self.assertResourceCalled('Execute',
                               ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'hadoop-hdfs-nfs3', version), sudo=True,)
     self.assertNoMoreResources()

http://git-wip-us.apache.org/repos/asf/ambari/blob/357d196e/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_client.py b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_client.py
index 2788a04..f6402b8 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_client.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_client.py
@@ -209,7 +209,8 @@ class TestHiveClient(RMFTestCase):
                        call_mocks = [(0, None, ''), (0, None, ''), (0, None, ''), (0, None, '')],
                        mocks_dict = mocks_dict)
 
-    self.assertResourceCalled('Execute',
+    self.assertResourceCalled('Link', ('/etc/hive/conf'), to='/usr/hdp/current/hive-client/conf')
+    self.assertResourceCalledIgnoreEarlier('Execute',
                               ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'hadoop-client', version), sudo=True,)
     self.assertNoMoreResources()
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/357d196e/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
index 69db9ee..cc0ed1c 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
@@ -952,6 +952,7 @@ From source with checksum 150f554beae04f76f814f59549dead8b"""
                        call_mocks = [(0, None, ''), (0, None, '')],
                        mocks_dict = mocks_dict)
 
+    self.assertResourceCalled('Link', ('/etc/hive/conf'), to='/usr/hdp/current/hive-client/conf')
     self.assertResourceCalled('Execute',
 
                               ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'hive-server2', version), sudo=True,)

http://git-wip-us.apache.org/repos/asf/ambari/blob/357d196e/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_webhcat_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_webhcat_server.py b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_webhcat_server.py
index af16a33..653074d 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_webhcat_server.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_webhcat_server.py
@@ -384,7 +384,7 @@ class TestWebHCatServer(RMFTestCase):
     self.assertTrue(sys.modules["params"].webhcat_conf_dir is not None)
     self.assertTrue("/usr/hdp/current/hive-webhcat/etc/webhcat" == sys.modules["params"].webhcat_conf_dir)
 
-    self.assertResourceCalled('Execute',
+    self.assertResourceCalledIgnoreEarlier('Execute',
                               ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'hive-webhcat', version), sudo=True,)
     self.assertNoMoreResources()
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/357d196e/ambari-server/src/test/python/stacks/2.0.6/PIG/test_pig_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/PIG/test_pig_client.py b/ambari-server/src/test/python/stacks/2.0.6/PIG/test_pig_client.py
index 85fbbad..9d4b062 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/PIG/test_pig_client.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/PIG/test_pig_client.py
@@ -166,7 +166,7 @@ class TestPigClient(RMFTestCase):
                        call_mocks = [(0, None, ''), (0, None, '')],
                        mocks_dict = mocks_dict)
 
-    self.assertResourceCalled('Execute',
+    self.assertResourceCalledIgnoreEarlier('Execute',
                               ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'hadoop-client', version), sudo=True)
     self.assertNoMoreResources()
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/357d196e/ambari-server/src/test/python/stacks/2.0.6/SQOOP/test_sqoop.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/SQOOP/test_sqoop.py b/ambari-server/src/test/python/stacks/2.0.6/SQOOP/test_sqoop.py
index cf94b48..6717649 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/SQOOP/test_sqoop.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/SQOOP/test_sqoop.py
@@ -129,6 +129,7 @@ class TestSqoop(RMFTestCase):
                        call_mocks = [(0, None, ''), (0, None)],
                        mocks_dict = mocks_dict)
 
+    self.assertResourceCalled('Link', ('/etc/sqoop/conf'), to='/usr/hdp/current/sqoop-client/conf')
     self.assertResourceCalled("Execute", ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'sqoop-client', version), sudo=True)
 
     self.assertEquals(1, mocks_dict['call'].call_count)

http://git-wip-us.apache.org/repos/asf/ambari/blob/357d196e/ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py
index 88fbba7..48d55b1 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py
@@ -821,7 +821,7 @@ class TestHistoryServer(RMFTestCase):
                        call_mocks = [(0, None, ''), (0, None), (0, None)],
                        mocks_dict = mocks_dict)
 
-    self.assertResourceCalled('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'hadoop-mapreduce-historyserver', version), sudo=True)
+    self.assertResourceCalledIgnoreEarlier('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'hadoop-mapreduce-historyserver', version), sudo=True)
     self.assertTrue(call("tez", "hadoop", "hdfs", host_sys_prepped=False) in copy_to_hdfs_mock.call_args_list)
     self.assertTrue(call("slider", "hadoop", "hdfs", host_sys_prepped=False) in copy_to_hdfs_mock.call_args_list)
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/357d196e/ambari-server/src/test/python/stacks/2.0.6/YARN/test_mapreduce2_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_mapreduce2_client.py b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_mapreduce2_client.py
index 361376d..6d310d4 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_mapreduce2_client.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_mapreduce2_client.py
@@ -406,7 +406,7 @@ class TestMapReduce2Client(RMFTestCase):
                        call_mocks = [(0, None, ''), (0, None)],
                        mocks_dict = mocks_dict)
 
-    self.assertResourceCalled('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'hadoop-client', version), sudo=True)
+    self.assertResourceCalledIgnoreEarlier('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'hadoop-client', version), sudo=True)
     self.assertNoMoreResources()
 
     self.assertEquals(1, mocks_dict['call'].call_count)

http://git-wip-us.apache.org/repos/asf/ambari/blob/357d196e/ambari-server/src/test/python/stacks/2.0.6/YARN/test_nodemanager.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_nodemanager.py b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_nodemanager.py
index 6973ccf..5eb9840 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_nodemanager.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_nodemanager.py
@@ -722,7 +722,7 @@ class TestNodeManager(RMFTestCase):
                        call_mocks = [(0, None, ''), (0, None)],
                        mocks_dict = mocks_dict)
 
-    self.assertResourceCalled('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'hadoop-yarn-nodemanager', version), sudo=True)
+    self.assertResourceCalledIgnoreEarlier('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'hadoop-yarn-nodemanager', version), sudo=True)
     self.assertNoMoreResources()
 
     self.assertEquals(1, mocks_dict['call'].call_count)

http://git-wip-us.apache.org/repos/asf/ambari/blob/357d196e/ambari-server/src/test/python/stacks/2.0.6/YARN/test_resourcemanager.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_resourcemanager.py b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_resourcemanager.py
index 2d24059..610f98c 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_resourcemanager.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_resourcemanager.py
@@ -636,7 +636,7 @@ class TestResourceManager(RMFTestCase):
                        call_mocks = [(0, None, ''), (0, None)],
                        mocks_dict = mocks_dict)
 
-    self.assertResourceCalled('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'hadoop-yarn-resourcemanager', version), sudo=True)
+    self.assertResourceCalledIgnoreEarlier('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'hadoop-yarn-resourcemanager', version), sudo=True)
     self.assertNoMoreResources()
 
     self.assertEquals(1, mocks_dict['call'].call_count)

http://git-wip-us.apache.org/repos/asf/ambari/blob/357d196e/ambari-server/src/test/python/stacks/2.0.6/YARN/test_yarn_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_yarn_client.py b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_yarn_client.py
index 80d4ef1..199721e 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_yarn_client.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_yarn_client.py
@@ -576,7 +576,7 @@ class TestYarnClient(RMFTestCase):
                        call_mocks = [(0, None, ''), (0, None)],
                        mocks_dict = mocks_dict)
 
-    self.assertResourceCalled('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'hadoop-client', version), sudo=True)
+    self.assertResourceCalledIgnoreEarlier('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'hadoop-client', version), sudo=True)
     self.assertNoMoreResources()
 
     self.assertEquals(1, mocks_dict['call'].call_count)

http://git-wip-us.apache.org/repos/asf/ambari/blob/357d196e/ambari-server/src/test/python/stacks/2.0.6/ZOOKEEPER/test_zookeeper_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/ZOOKEEPER/test_zookeeper_client.py b/ambari-server/src/test/python/stacks/2.0.6/ZOOKEEPER/test_zookeeper_client.py
index 63c6fd8..0d24a73 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/ZOOKEEPER/test_zookeeper_client.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/ZOOKEEPER/test_zookeeper_client.py
@@ -196,7 +196,7 @@ class TestZookeeperClient(RMFTestCase):
                        call_mocks = [(0, None, ''), (0, None)],
                        mocks_dict = mocks_dict)
 
-    self.assertResourceCalled('Execute',
+    self.assertResourceCalledIgnoreEarlier('Execute',
                               ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'zookeeper-client', version), sudo=True)
 
     self.assertEquals(1, mocks_dict['call'].call_count)

http://git-wip-us.apache.org/repos/asf/ambari/blob/357d196e/ambari-server/src/test/python/stacks/2.0.6/ZOOKEEPER/test_zookeeper_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/ZOOKEEPER/test_zookeeper_server.py b/ambari-server/src/test/python/stacks/2.0.6/ZOOKEEPER/test_zookeeper_server.py
index f83df6f..336a3bc 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/ZOOKEEPER/test_zookeeper_server.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/ZOOKEEPER/test_zookeeper_server.py
@@ -382,6 +382,7 @@ class TestZookeeperServer(RMFTestCase):
                        call_mocks = [(0, None, ''), (0, None)],
                        mocks_dict = mocks_dict)
 
+    self.assertResourceCalledIgnoreEarlier('Link', ('/etc/zookeeper/conf'), to='/usr/hdp/current/zookeeper-client/conf')
     self.assertResourceCalled('Execute',
                               ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'zookeeper-server', version), sudo=True)
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/357d196e/ambari-server/src/test/python/stacks/2.0.6/hooks/after-INSTALL/test_after_install.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/hooks/after-INSTALL/test_after_install.py b/ambari-server/src/test/python/stacks/2.0.6/hooks/after-INSTALL/test_after_install.py
index de3d7ce..490b3bf 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/hooks/after-INSTALL/test_after_install.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/hooks/after-INSTALL/test_after_install.py
@@ -82,43 +82,43 @@ class TestHookAfterInstall(RMFTestCase):
       configuration_attributes = self.getConfig()['configuration_attributes']['core-site'],
       only_if="ls /usr/hdp/current/hadoop-client/conf")
 
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/ranger/kms/conf', '/etc/ranger/kms/conf.install'),
-        not_if = 'test -e /etc/ranger/kms/conf.install',
+    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/ranger/kms/conf', '/etc/ranger/kms/conf.backup'),
+        not_if = 'test -e /etc/ranger/kms/conf.backup',
         sudo = True,)
     self.assertResourceCalled('Directory', '/etc/ranger/kms/conf',
         action = ['delete'],)
     self.assertResourceCalled('Link', '/etc/ranger/kms/conf',
         to = '/usr/hdp/current/ranger-kms/conf',)
 
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/zookeeper/conf', '/etc/zookeeper/conf.install'),
-        not_if = 'test -e /etc/zookeeper/conf.install',
+    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/zookeeper/conf', '/etc/zookeeper/conf.backup'),
+        not_if = 'test -e /etc/zookeeper/conf.backup',
         sudo = True)
     self.assertResourceCalled('Directory', '/etc/zookeeper/conf',
         action = ['delete'])
     self.assertResourceCalled('Link', '/etc/zookeeper/conf',
         to = '/usr/hdp/current/zookeeper-client/conf')
 
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/pig/conf', '/etc/pig/conf.install'),
-        not_if = 'test -e /etc/pig/conf.install',
+    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/pig/conf', '/etc/pig/conf.backup'),
+        not_if = 'test -e /etc/pig/conf.backup',
         sudo = True,)
     self.assertResourceCalled('Directory', '/etc/pig/conf',
         action = ['delete'],)
     self.assertResourceCalled('Link', '/etc/pig/conf',
         to = '/usr/hdp/current/pig-client/conf',)
 
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/tez/conf', '/etc/tez/conf.install'),
-        not_if = 'test -e /etc/tez/conf.install',
+    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/tez/conf', '/etc/tez/conf.backup'),
+        not_if = 'test -e /etc/tez/conf.backup',
         sudo = True)
     self.assertResourceCalled('Directory', '/etc/tez/conf',
         action = ['delete'])
     self.assertResourceCalled('Link', '/etc/tez/conf',
         to = '/usr/hdp/current/tez-client/conf')
 
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/hive-webhcat/conf', '/etc/hive-webhcat/conf.install'),
-        not_if = 'test -e /etc/hive-webhcat/conf.install',
+    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/hive-webhcat/conf', '/etc/hive-webhcat/conf.backup'),
+        not_if = 'test -e /etc/hive-webhcat/conf.backup',
         sudo = True,)
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/hive-hcatalog/conf', '/etc/hive-hcatalog/conf.install'),
-        not_if = 'test -e /etc/hive-hcatalog/conf.install',
+    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/hive-hcatalog/conf', '/etc/hive-hcatalog/conf.backup'),
+        not_if = 'test -e /etc/hive-hcatalog/conf.backup',
         sudo = True,)
 
     self.assertResourceCalled('Directory', '/etc/hive-webhcat/conf',
@@ -131,120 +131,120 @@ class TestHookAfterInstall(RMFTestCase):
     self.assertResourceCalled('Link', '/etc/hive-hcatalog/conf',
         to = '/usr/hdp/current/hive-webhcat/etc/hcatalog',)
 
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/hbase/conf', '/etc/hbase/conf.install'),
-        not_if = 'test -e /etc/hbase/conf.install',
+    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/hbase/conf', '/etc/hbase/conf.backup'),
+        not_if = 'test -e /etc/hbase/conf.backup',
         sudo = True)
     self.assertResourceCalled('Directory', '/etc/hbase/conf',
         action = ['delete'])
     self.assertResourceCalled('Link', '/etc/hbase/conf',
         to = '/usr/hdp/current/hbase-client/conf')
 
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/knox/conf', '/etc/knox/conf.install'),
-        not_if = 'test -e /etc/knox/conf.install',
+    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/knox/conf', '/etc/knox/conf.backup'),
+        not_if = 'test -e /etc/knox/conf.backup',
         sudo = True)
     self.assertResourceCalled('Directory', '/etc/knox/conf',
         action = ['delete'])
     self.assertResourceCalled('Link', '/etc/knox/conf',
         to = '/usr/hdp/current/knox-server/conf')
 
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/ranger/usersync/conf', '/etc/ranger/usersync/conf.install'),
-        not_if = 'test -e /etc/ranger/usersync/conf.install',
+    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/ranger/usersync/conf', '/etc/ranger/usersync/conf.backup'),
+        not_if = 'test -e /etc/ranger/usersync/conf.backup',
         sudo = True)
     self.assertResourceCalled('Directory', '/etc/ranger/usersync/conf',
         action = ['delete'])
     self.assertResourceCalled('Link', '/etc/ranger/usersync/conf',
         to = '/usr/hdp/current/ranger-usersync/conf')
 
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/hadoop/conf', '/etc/hadoop/conf.install'),
-        not_if = 'test -e /etc/hadoop/conf.install',
+    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/hadoop/conf', '/etc/hadoop/conf.backup'),
+        not_if = 'test -e /etc/hadoop/conf.backup',
         sudo = True)
     self.assertResourceCalled('Directory', '/etc/hadoop/conf',
         action = ['delete'])
     self.assertResourceCalled('Link', '/etc/hadoop/conf',
         to = '/usr/hdp/current/hadoop-client/conf')
 
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/mahout/conf', '/etc/mahout/conf.install'),
-        not_if = 'test -e /etc/mahout/conf.install',
+    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/mahout/conf', '/etc/mahout/conf.backup'),
+        not_if = 'test -e /etc/mahout/conf.backup',
         sudo = True)
     self.assertResourceCalled('Directory', '/etc/mahout/conf',
         action = ['delete'])
     self.assertResourceCalled('Link', '/etc/mahout/conf',
         to = '/usr/hdp/current/mahout-client/conf')
 
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/storm/conf', '/etc/storm/conf.install'),
-        not_if = 'test -e /etc/storm/conf.install',
+    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/storm/conf', '/etc/storm/conf.backup'),
+        not_if = 'test -e /etc/storm/conf.backup',
         sudo = True)
     self.assertResourceCalled('Directory', '/etc/storm/conf',
         action = ['delete'])
     self.assertResourceCalled('Link', '/etc/storm/conf',
         to = '/usr/hdp/current/storm-client/conf')
 
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/ranger/admin/conf', '/etc/ranger/admin/conf.install'),
-        not_if = 'test -e /etc/ranger/admin/conf.install',
+    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/ranger/admin/conf', '/etc/ranger/admin/conf.backup'),
+        not_if = 'test -e /etc/ranger/admin/conf.backup',
         sudo = True)
     self.assertResourceCalled('Directory', '/etc/ranger/admin/conf',
         action = ['delete'])
     self.assertResourceCalled('Link', '/etc/ranger/admin/conf',
         to = '/usr/hdp/current/ranger-admin/conf')
 
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/flume/conf', '/etc/flume/conf.install'),
-        not_if = 'test -e /etc/flume/conf.install',
+    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/flume/conf', '/etc/flume/conf.backup'),
+        not_if = 'test -e /etc/flume/conf.backup',
         sudo = True,)
     self.assertResourceCalled('Directory', '/etc/flume/conf',
         action = ['delete'],)
     self.assertResourceCalled('Link', '/etc/flume/conf',
         to = '/usr/hdp/current/flume-server/conf',)
 
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/sqoop/conf', '/etc/sqoop/conf.install'),
-        not_if = 'test -e /etc/sqoop/conf.install',
+    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/sqoop/conf', '/etc/sqoop/conf.backup'),
+        not_if = 'test -e /etc/sqoop/conf.backup',
         sudo = True)
     self.assertResourceCalled('Directory', '/etc/sqoop/conf',
         action = ['delete'])
     self.assertResourceCalled('Link', '/etc/sqoop/conf',
         to = '/usr/hdp/current/sqoop-client/conf')
 
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/accumulo/conf', '/etc/accumulo/conf.install'),
-        not_if = 'test -e /etc/accumulo/conf.install',
+    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/accumulo/conf', '/etc/accumulo/conf.backup'),
+        not_if = 'test -e /etc/accumulo/conf.backup',
         sudo = True,)
     self.assertResourceCalled('Directory', '/etc/accumulo/conf',
         action = ['delete'],)
     self.assertResourceCalled('Link', '/etc/accumulo/conf',
         to = '/usr/hdp/current/accumulo-client/conf',)
 
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/phoenix/conf', '/etc/phoenix/conf.install'),
-        not_if = 'test -e /etc/phoenix/conf.install',
+    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/phoenix/conf', '/etc/phoenix/conf.backup'),
+        not_if = 'test -e /etc/phoenix/conf.backup',
         sudo = True)
     self.assertResourceCalled('Directory', '/etc/phoenix/conf',
         action = ['delete'])
     self.assertResourceCalled('Link', '/etc/phoenix/conf',
         to = '/usr/hdp/current/phoenix-client/conf')
 
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/storm-slider-client/conf', '/etc/storm-slider-client/conf.install'),
-        not_if = 'test -e /etc/storm-slider-client/conf.install',
+    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/storm-slider-client/conf', '/etc/storm-slider-client/conf.backup'),
+        not_if = 'test -e /etc/storm-slider-client/conf.backup',
         sudo = True)
     self.assertResourceCalled('Directory', '/etc/storm-slider-client/conf',
         action = ['delete'])
     self.assertResourceCalled('Link', '/etc/storm-slider-client/conf',
         to = '/usr/hdp/current/storm-slider-client/conf')
 
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/slider/conf', '/etc/slider/conf.install'),
-        not_if = 'test -e /etc/slider/conf.install',
+    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/slider/conf', '/etc/slider/conf.backup'),
+        not_if = 'test -e /etc/slider/conf.backup',
         sudo = True)
     self.assertResourceCalled('Directory', '/etc/slider/conf',
         action = ['delete'])
     self.assertResourceCalled('Link', '/etc/slider/conf',
         to = '/usr/hdp/current/slider-client/conf')
 
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/oozie/conf', '/etc/oozie/conf.install'),
-        not_if = 'test -e /etc/oozie/conf.install',
+    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/oozie/conf', '/etc/oozie/conf.backup'),
+        not_if = 'test -e /etc/oozie/conf.backup',
         sudo = True)
     self.assertResourceCalled('Directory', '/etc/oozie/conf',
         action = ['delete'])
     self.assertResourceCalled('Link', '/etc/oozie/conf',
         to = '/usr/hdp/current/oozie-client/conf')
 
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/falcon/conf', '/etc/falcon/conf.install'),
-        not_if = 'test -e /etc/falcon/conf.install',
+    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/falcon/conf', '/etc/falcon/conf.backup'),
+        not_if = 'test -e /etc/falcon/conf.backup',
         sudo = True)
     self.assertResourceCalled('Directory', '/etc/falcon/conf',
         action = ['delete'])
@@ -252,24 +252,24 @@ class TestHookAfterInstall(RMFTestCase):
         to = '/usr/hdp/current/falcon-client/conf')
 
 
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/spark/conf', '/etc/spark/conf.install'),
-        not_if = 'test -e /etc/spark/conf.install',
+    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/spark/conf', '/etc/spark/conf.backup'),
+        not_if = 'test -e /etc/spark/conf.backup',
         sudo = True)
     self.assertResourceCalled('Directory', '/etc/spark/conf',
         action = ['delete'])
     self.assertResourceCalled('Link', '/etc/spark/conf',
         to = '/usr/hdp/current/spark-client/conf')
 
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/kafka/conf', '/etc/kafka/conf.install'),
-        not_if = 'test -e /etc/kafka/conf.install',
+    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/kafka/conf', '/etc/kafka/conf.backup'),
+        not_if = 'test -e /etc/kafka/conf.backup',
         sudo = True)
     self.assertResourceCalled('Directory', '/etc/kafka/conf',
         action = ['delete'])
     self.assertResourceCalled('Link', '/etc/kafka/conf',
         to = '/usr/hdp/current/kafka-broker/conf')
 
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/hive/conf', '/etc/hive/conf.install'),
-        not_if = 'test -e /etc/hive/conf.install',
+    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/hive/conf', '/etc/hive/conf.backup'),
+        not_if = 'test -e /etc/hive/conf.backup',
         sudo = True)
     self.assertResourceCalled('Directory', '/etc/hive/conf',
         action = ['delete'])
@@ -321,40 +321,40 @@ class TestHookAfterInstall(RMFTestCase):
       configuration_attributes = self.getConfig()['configuration_attributes']['core-site'],
       only_if="ls /usr/hdp/current/hadoop-client/conf")
 
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/ranger/kms/conf', '/etc/ranger/kms/conf.install'),
-        not_if = 'test -e /etc/ranger/kms/conf.install',
+    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/ranger/kms/conf', '/etc/ranger/kms/conf.backup'),
+        not_if = 'test -e /etc/ranger/kms/conf.backup',
         sudo = True,)
     self.assertResourceCalled('Directory', '/etc/ranger/kms/conf',
         action = ['delete'],)
     self.assertResourceCalled('Link', '/etc/ranger/kms/conf',
         to = '/usr/hdp/current/ranger-kms/conf',)
 
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/zookeeper/conf', '/etc/zookeeper/conf.install'),
-        not_if = 'test -e /etc/zookeeper/conf.install',
+    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/zookeeper/conf', '/etc/zookeeper/conf.backup'),
+        not_if = 'test -e /etc/zookeeper/conf.backup',
         sudo = True)
     self.assertResourceCalled('Directory', '/etc/zookeeper/conf',
         action = ['delete'])
     self.assertResourceCalled('Link', '/etc/zookeeper/conf',
         to = '/usr/hdp/current/zookeeper-client/conf')
 
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/pig/conf', '/etc/pig/conf.install'),
-        not_if = 'test -e /etc/pig/conf.install',
+    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/pig/conf', '/etc/pig/conf.backup'),
+        not_if = 'test -e /etc/pig/conf.backup',
         sudo = True,)
     # pig fails, so no Directory/Link combo
 
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/tez/conf', '/etc/tez/conf.install'),
-        not_if = 'test -e /etc/tez/conf.install',
+    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/tez/conf', '/etc/tez/conf.backup'),
+        not_if = 'test -e /etc/tez/conf.backup',
         sudo = True)
     self.assertResourceCalled('Directory', '/etc/tez/conf',
         action = ['delete'])
     self.assertResourceCalled('Link', '/etc/tez/conf',
         to = '/usr/hdp/current/tez-client/conf')
 
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/hive-webhcat/conf', '/etc/hive-webhcat/conf.install'),
-        not_if = 'test -e /etc/hive-webhcat/conf.install',
+    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/hive-webhcat/conf', '/etc/hive-webhcat/conf.backup'),
+        not_if = 'test -e /etc/hive-webhcat/conf.backup',
         sudo = True,)
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/hive-hcatalog/conf', '/etc/hive-hcatalog/conf.install'),
-        not_if = 'test -e /etc/hive-hcatalog/conf.install',
+    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/hive-hcatalog/conf', '/etc/hive-hcatalog/conf.backup'),
+        not_if = 'test -e /etc/hive-hcatalog/conf.backup',
         sudo = True,)
 
     self.assertResourceCalled('Directory', '/etc/hive-webhcat/conf',
@@ -367,120 +367,120 @@ class TestHookAfterInstall(RMFTestCase):
     self.assertResourceCalled('Link', '/etc/hive-hcatalog/conf',
         to = '/usr/hdp/current/hive-webhcat/etc/hcatalog',)
 
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/hbase/conf', '/etc/hbase/conf.install'),
-        not_if = 'test -e /etc/hbase/conf.install',
+    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/hbase/conf', '/etc/hbase/conf.backup'),
+        not_if = 'test -e /etc/hbase/conf.backup',
         sudo = True)
     self.assertResourceCalled('Directory', '/etc/hbase/conf',
         action = ['delete'])
     self.assertResourceCalled('Link', '/etc/hbase/conf',
         to = '/usr/hdp/current/hbase-client/conf')
 
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/knox/conf', '/etc/knox/conf.install'),
-        not_if = 'test -e /etc/knox/conf.install',
+    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/knox/conf', '/etc/knox/conf.backup'),
+        not_if = 'test -e /etc/knox/conf.backup',
         sudo = True)
     self.assertResourceCalled('Directory', '/etc/knox/conf',
         action = ['delete'])
     self.assertResourceCalled('Link', '/etc/knox/conf',
         to = '/usr/hdp/current/knox-server/conf')
 
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/ranger/usersync/conf', '/etc/ranger/usersync/conf.install'),
-        not_if = 'test -e /etc/ranger/usersync/conf.install',
+    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/ranger/usersync/conf', '/etc/ranger/usersync/conf.backup'),
+        not_if = 'test -e /etc/ranger/usersync/conf.backup',
         sudo = True)
     self.assertResourceCalled('Directory', '/etc/ranger/usersync/conf',
         action = ['delete'])
     self.assertResourceCalled('Link', '/etc/ranger/usersync/conf',
         to = '/usr/hdp/current/ranger-usersync/conf')
 
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/hadoop/conf', '/etc/hadoop/conf.install'),
-        not_if = 'test -e /etc/hadoop/conf.install',
+    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/hadoop/conf', '/etc/hadoop/conf.backup'),
+        not_if = 'test -e /etc/hadoop/conf.backup',
         sudo = True)
     self.assertResourceCalled('Directory', '/etc/hadoop/conf',
         action = ['delete'])
     self.assertResourceCalled('Link', '/etc/hadoop/conf',
         to = '/usr/hdp/current/hadoop-client/conf')
 
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/mahout/conf', '/etc/mahout/conf.install'),
-        not_if = 'test -e /etc/mahout/conf.install',
+    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/mahout/conf', '/etc/mahout/conf.backup'),
+        not_if = 'test -e /etc/mahout/conf.backup',
         sudo = True)
     self.assertResourceCalled('Directory', '/etc/mahout/conf',
         action = ['delete'])
     self.assertResourceCalled('Link', '/etc/mahout/conf',
         to = '/usr/hdp/current/mahout-client/conf')
 
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/storm/conf', '/etc/storm/conf.install'),
-        not_if = 'test -e /etc/storm/conf.install',
+    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/storm/conf', '/etc/storm/conf.backup'),
+        not_if = 'test -e /etc/storm/conf.backup',
         sudo = True)
     self.assertResourceCalled('Directory', '/etc/storm/conf',
         action = ['delete'])
     self.assertResourceCalled('Link', '/etc/storm/conf',
         to = '/usr/hdp/current/storm-client/conf')
 
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/ranger/admin/conf', '/etc/ranger/admin/conf.install'),
-        not_if = 'test -e /etc/ranger/admin/conf.install',
+    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/ranger/admin/conf', '/etc/ranger/admin/conf.backup'),
+        not_if = 'test -e /etc/ranger/admin/conf.backup',
         sudo = True)
     self.assertResourceCalled('Directory', '/etc/ranger/admin/conf',
         action = ['delete'])
     self.assertResourceCalled('Link', '/etc/ranger/admin/conf',
         to = '/usr/hdp/current/ranger-admin/conf')
 
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/flume/conf', '/etc/flume/conf.install'),
-        not_if = 'test -e /etc/flume/conf.install',
+    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/flume/conf', '/etc/flume/conf.backup'),
+        not_if = 'test -e /etc/flume/conf.backup',
         sudo = True,)
     self.assertResourceCalled('Directory', '/etc/flume/conf',
         action = ['delete'],)
     self.assertResourceCalled('Link', '/etc/flume/conf',
         to = '/usr/hdp/current/flume-server/conf',)
 
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/sqoop/conf', '/etc/sqoop/conf.install'),
-        not_if = 'test -e /etc/sqoop/conf.install',
+    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/sqoop/conf', '/etc/sqoop/conf.backup'),
+        not_if = 'test -e /etc/sqoop/conf.backup',
         sudo = True)
     self.assertResourceCalled('Directory', '/etc/sqoop/conf',
         action = ['delete'])
     self.assertResourceCalled('Link', '/etc/sqoop/conf',
         to = '/usr/hdp/current/sqoop-client/conf')
 
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/accumulo/conf', '/etc/accumulo/conf.install'),
-        not_if = 'test -e /etc/accumulo/conf.install',
+    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/accumulo/conf', '/etc/accumulo/conf.backup'),
+        not_if = 'test -e /etc/accumulo/conf.backup',
         sudo = True,)
     self.assertResourceCalled('Directory', '/etc/accumulo/conf',
         action = ['delete'],)
     self.assertResourceCalled('Link', '/etc/accumulo/conf',
         to = '/usr/hdp/current/accumulo-client/conf',)
 
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/phoenix/conf', '/etc/phoenix/conf.install'),
-        not_if = 'test -e /etc/phoenix/conf.install',
+    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/phoenix/conf', '/etc/phoenix/conf.backup'),
+        not_if = 'test -e /etc/phoenix/conf.backup',
         sudo = True)
     self.assertResourceCalled('Directory', '/etc/phoenix/conf',
         action = ['delete'])
     self.assertResourceCalled('Link', '/etc/phoenix/conf',
         to = '/usr/hdp/current/phoenix-client/conf')
 
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/storm-slider-client/conf', '/etc/storm-slider-client/conf.install'),
-        not_if = 'test -e /etc/storm-slider-client/conf.install',
+    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/storm-slider-client/conf', '/etc/storm-slider-client/conf.backup'),
+        not_if = 'test -e /etc/storm-slider-client/conf.backup',
         sudo = True)
     self.assertResourceCalled('Directory', '/etc/storm-slider-client/conf',
         action = ['delete'])
     self.assertResourceCalled('Link', '/etc/storm-slider-client/conf',
         to = '/usr/hdp/current/storm-slider-client/conf')
 
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/slider/conf', '/etc/slider/conf.install'),
-        not_if = 'test -e /etc/slider/conf.install',
+    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/slider/conf', '/etc/slider/conf.backup'),
+        not_if = 'test -e /etc/slider/conf.backup',
         sudo = True)
     self.assertResourceCalled('Directory', '/etc/slider/conf',
         action = ['delete'])
     self.assertResourceCalled('Link', '/etc/slider/conf',
         to = '/usr/hdp/current/slider-client/conf')
 
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/oozie/conf', '/etc/oozie/conf.install'),
-        not_if = 'test -e /etc/oozie/conf.install',
+    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/oozie/conf', '/etc/oozie/conf.backup'),
+        not_if = 'test -e /etc/oozie/conf.backup',
         sudo = True)
     self.assertResourceCalled('Directory', '/etc/oozie/conf',
         action = ['delete'])
     self.assertResourceCalled('Link', '/etc/oozie/conf',
         to = '/usr/hdp/current/oozie-client/conf')
 
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/falcon/conf', '/etc/falcon/conf.install'),
-        not_if = 'test -e /etc/falcon/conf.install',
+    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/falcon/conf', '/etc/falcon/conf.backup'),
+        not_if = 'test -e /etc/falcon/conf.backup',
         sudo = True)
     self.assertResourceCalled('Directory', '/etc/falcon/conf',
         action = ['delete'])
@@ -488,24 +488,24 @@ class TestHookAfterInstall(RMFTestCase):
         to = '/usr/hdp/current/falcon-client/conf')
 
 
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/spark/conf', '/etc/spark/conf.install'),
-        not_if = 'test -e /etc/spark/conf.install',
+    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/spark/conf', '/etc/spark/conf.backup'),
+        not_if = 'test -e /etc/spark/conf.backup',
         sudo = True)
     self.assertResourceCalled('Directory', '/etc/spark/conf',
         action = ['delete'])
     self.assertResourceCalled('Link', '/etc/spark/conf',
         to = '/usr/hdp/current/spark-client/conf')
 
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/kafka/conf', '/etc/kafka/conf.install'),
-        not_if = 'test -e /etc/kafka/conf.install',
+    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/kafka/conf', '/etc/kafka/conf.backup'),
+        not_if = 'test -e /etc/kafka/conf.backup',
         sudo = True)
     self.assertResourceCalled('Directory', '/etc/kafka/conf',
         action = ['delete'])
     self.assertResourceCalled('Link', '/etc/kafka/conf',
         to = '/usr/hdp/current/kafka-broker/conf')
 
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/hive/conf', '/etc/hive/conf.install'),
-        not_if = 'test -e /etc/hive/conf.install',
+    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/hive/conf', '/etc/hive/conf.backup'),
+        not_if = 'test -e /etc/hive/conf.backup',
         sudo = True)
     self.assertResourceCalled('Directory', '/etc/hive/conf',
         action = ['delete'])
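
The after-INSTALL hook test above renames the one-time copy of each packaged conf directory from conf.install to conf.backup. The per-directory sequence the assertions describe is: copy once, delete the packaged directory, then symlink it into /usr/hdp/current. A minimal sketch under those assumptions; Execute, Directory and Link are the resource types from the assertions, while the import path and function name are illustrative:

from resource_management.core.resources.system import Directory, Execute, Link

def backup_and_link_conf(conf_dir, versioned_conf):
    # e.g. backup_and_link_conf('/etc/zookeeper/conf', '/usr/hdp/current/zookeeper-client/conf')
    backup_dir = conf_dir + '.backup'
    Execute(('cp', '-R', '-p', conf_dir, backup_dir),
            not_if='test -e ' + backup_dir,   # only back up the packaged conf once
            sudo=True)
    Directory(conf_dir, action=['delete'])    # remove the packaged conf directory
    Link(conf_dir, to=versioned_conf)         # replace it with the versioned symlink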

http://git-wip-us.apache.org/repos/asf/ambari/blob/357d196e/ambari-server/src/test/python/stacks/2.1/FALCON/test_falcon_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.1/FALCON/test_falcon_client.py b/ambari-server/src/test/python/stacks/2.1/FALCON/test_falcon_client.py
index c10ee8a..da52643 100644
--- a/ambari-server/src/test/python/stacks/2.1/FALCON/test_falcon_client.py
+++ b/ambari-server/src/test/python/stacks/2.1/FALCON/test_falcon_client.py
@@ -143,7 +143,7 @@ class TestFalconClient(RMFTestCase):
                        call_mocks = [(0, None, ''), (0, None)],
                        mocks_dict = mocks_dict)
 
-    self.assertResourceCalled('Execute',
+    self.assertResourceCalledIgnoreEarlier('Execute',
                               ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'falcon-client', version), sudo=True,)
     self.assertNoMoreResources()
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/357d196e/ambari-server/src/test/python/stacks/2.1/FALCON/test_falcon_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.1/FALCON/test_falcon_server.py b/ambari-server/src/test/python/stacks/2.1/FALCON/test_falcon_server.py
index 6eb60ce..b011f93 100644
--- a/ambari-server/src/test/python/stacks/2.1/FALCON/test_falcon_server.py
+++ b/ambari-server/src/test/python/stacks/2.1/FALCON/test_falcon_server.py
@@ -538,7 +538,7 @@ class TestFalconServer(RMFTestCase):
                        call_mocks = [(0, None, ''), (0, None)],
                        mocks_dict = mocks_dict)
 
-    self.assertResourceCalled('Execute',
+    self.assertResourceCalledIgnoreEarlier('Execute',
                               ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'falcon-server', version), sudo=True,)
 
     self.assertResourceCalled('Execute', ('tar',

http://git-wip-us.apache.org/repos/asf/ambari/blob/357d196e/ambari-server/src/test/python/stacks/2.1/HIVE/test_hive_metastore.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.1/HIVE/test_hive_metastore.py b/ambari-server/src/test/python/stacks/2.1/HIVE/test_hive_metastore.py
index 514633c..e43e9ac 100644
--- a/ambari-server/src/test/python/stacks/2.1/HIVE/test_hive_metastore.py
+++ b/ambari-server/src/test/python/stacks/2.1/HIVE/test_hive_metastore.py
@@ -522,6 +522,7 @@ class TestHiveMetastore(RMFTestCase):
                        call_mocks = [(0, None, ''), (0, None)],
                        mocks_dict = mocks_dict)
 
+    self.assertResourceCalled('Link', ('/etc/hive/conf'), to='/usr/hdp/current/hive-client/conf')
     self.assertResourceCalled('Execute',
                               ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'hive-metastore', version), sudo=True,)
     self.assertNoMoreResources()
@@ -673,6 +674,7 @@ class TestHiveMetastore(RMFTestCase):
         tries = 1,
         user = 'hive',
     )
+    self.assertResourceCalled('Link', ('/etc/hive/conf'), to='/usr/hdp/current/hive-client/conf')
     self.assertResourceCalled('Execute', ('ambari-python-wrap',
      '/usr/bin/hdp-select',
      'set',
@@ -847,6 +849,8 @@ class TestHiveMetastore(RMFTestCase):
                               logoutput = True, environment = {'HIVE_CONF_DIR': '/usr/hdp/current/hive-server2/conf/conf.server'},
                               tries = 1, user = 'hive')
 
+    self.assertResourceCalled('Link', ('/etc/hive/conf'), to='/usr/hdp/current/hive-client/conf')
+
     self.assertResourceCalled('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'hive-metastore', version), sudo=True,)
 
     self.assertNoMoreResources()

http://git-wip-us.apache.org/repos/asf/ambari/blob/357d196e/ambari-server/src/test/python/stacks/2.1/STORM/test_storm_drpc_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.1/STORM/test_storm_drpc_server.py b/ambari-server/src/test/python/stacks/2.1/STORM/test_storm_drpc_server.py
index cef5b97..e8a3135 100644
--- a/ambari-server/src/test/python/stacks/2.1/STORM/test_storm_drpc_server.py
+++ b/ambari-server/src/test/python/stacks/2.1/STORM/test_storm_drpc_server.py
@@ -176,7 +176,7 @@ class TestStormDrpcServer(TestStormBase):
                      call_mocks = [(0, None, ''), (0, None)],
                      mocks_dict = mocks_dict)
 
-    self.assertResourceCalled("Execute", ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'storm-client', '2.3.0.0-1234'), sudo=True)
+    self.assertResourceCalledIgnoreEarlier("Execute", ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'storm-client', '2.3.0.0-1234'), sudo=True)
 
     self.assertEquals(1, mocks_dict['call'].call_count)
     self.assertEquals(1, mocks_dict['checked_call'].call_count)

http://git-wip-us.apache.org/repos/asf/ambari/blob/357d196e/ambari-server/src/test/python/stacks/2.1/STORM/test_storm_nimbus.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.1/STORM/test_storm_nimbus.py b/ambari-server/src/test/python/stacks/2.1/STORM/test_storm_nimbus.py
index 8ede94c..18e25c0 100644
--- a/ambari-server/src/test/python/stacks/2.1/STORM/test_storm_nimbus.py
+++ b/ambari-server/src/test/python/stacks/2.1/STORM/test_storm_nimbus.py
@@ -175,7 +175,7 @@ class TestStormNimbus(TestStormBase):
                      call_mocks = [(0, None, ''), (0, None)],
                      mocks_dict = mocks_dict)
 
-    self.assertResourceCalled("Execute", ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'storm-client', '2.3.0.0-1234'), sudo=True)
+    self.assertResourceCalledIgnoreEarlier("Execute", ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'storm-client', '2.3.0.0-1234'), sudo=True)
     self.assertResourceCalled("Execute", ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'storm-nimbus', '2.3.0.0-1234'), sudo=True)
 
     self.assertEquals(1, mocks_dict['call'].call_count)

http://git-wip-us.apache.org/repos/asf/ambari/blob/357d196e/ambari-server/src/test/python/stacks/2.1/STORM/test_storm_nimbus_prod.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.1/STORM/test_storm_nimbus_prod.py b/ambari-server/src/test/python/stacks/2.1/STORM/test_storm_nimbus_prod.py
index 2aa3df5..74877af 100644
--- a/ambari-server/src/test/python/stacks/2.1/STORM/test_storm_nimbus_prod.py
+++ b/ambari-server/src/test/python/stacks/2.1/STORM/test_storm_nimbus_prod.py
@@ -140,7 +140,7 @@ class TestStormNimbus(TestStormBase):
                      call_mocks = [(0, None, ''), (0, None, '')],
                      mocks_dict = mocks_dict)
 
-    self.assertResourceCalled("Execute", ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'storm-client', '2.3.0.0-1234'), sudo=True)
+    self.assertResourceCalledIgnoreEarlier("Execute", ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'storm-client', '2.3.0.0-1234'), sudo=True)
     self.assertResourceCalled("Execute", ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'storm-nimbus', '2.3.0.0-1234'), sudo=True)
 
     self.assertEquals(1, mocks_dict['call'].call_count)