Posted to commits@ambari.apache.org by jo...@apache.org on 2017/07/27 19:37:13 UTC

[1/5] ambari git commit: AMBARI-21580 - Replace Hard Coded stack-select Structures (jonathanhurley)

Repository: ambari
Updated Branches:
  refs/heads/branch-feature-AMBARI-21450 863343ad3 -> 56d2ade2b


http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/test/python/stacks/2.0.6/hooks/after-INSTALL/test_after_install.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/hooks/after-INSTALL/test_after_install.py b/ambari-server/src/test/python/stacks/2.0.6/hooks/after-INSTALL/test_after_install.py
index 1bfa173..e32f1a2 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/hooks/after-INSTALL/test_after_install.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/hooks/after-INSTALL/test_after_install.py
@@ -25,13 +25,15 @@ from resource_management.libraries.functions import conf_select
 
 @patch("os.path.exists", new = MagicMock(return_value=True))
 class TestHookAfterInstall(RMFTestCase):
+  CONFIG_OVERRIDES = {"serviceName":"HIVE", "role":"HIVE_SERVER"}
 
   def test_hook_default(self):
 
     self.executeScript("2.0.6/hooks/after-INSTALL/scripts/hook.py",
                        classname="AfterInstallHook",
                        command="hook",
-                       config_file="default.json"
+                       config_file="default.json",
+                       config_overrides = self.CONFIG_OVERRIDES
     )
     self.assertResourceCalled('XmlConfig', 'core-site.xml',
                               owner = 'hdfs',
@@ -44,7 +46,7 @@ class TestHookAfterInstall(RMFTestCase):
     self.assertNoMoreResources()
 
 
-  @patch("shared_initialization.load_version", new = MagicMock(return_value="2.3.0.0-1243"))
+  @patch("shared_initialization.load_version", new = MagicMock(return_value="2.3.0.0-1234"))
   @patch("resource_management.libraries.functions.conf_select.create")
   @patch("resource_management.libraries.functions.conf_select.select")
   @patch("os.symlink")
@@ -67,11 +69,12 @@ class TestHookAfterInstall(RMFTestCase):
     self.executeScript("2.0.6/hooks/after-INSTALL/scripts/hook.py",
                        classname="AfterInstallHook",
                        command="hook",
-                       config_dict = json_content)
+                       config_dict = json_content,
+                       config_overrides = self.CONFIG_OVERRIDES)
 
 
-    self.assertResourceCalled('Execute', 'ambari-sudo.sh /usr/bin/hdp-select set all `ambari-python-wrap /usr/bin/hdp-select versions | grep ^2.3 | tail -1`',
-      only_if = 'ls -d /usr/hdp/2.3*')
+    self.assertResourceCalled('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'hive-server2', '2.3.0.0-1234'),
+      sudo = True)
 
     self.assertResourceCalled('XmlConfig', 'core-site.xml',
       owner = 'hdfs',
@@ -105,7 +108,7 @@ class TestHookAfterInstall(RMFTestCase):
 
     self.assertNoMoreResources()
 
-  @patch("shared_initialization.load_version", new = MagicMock(return_value="2.3.0.0-1243"))
+  @patch("shared_initialization.load_version", new = MagicMock(return_value="2.3.0.0-1234"))
   @patch("resource_management.libraries.functions.conf_select.create")
   @patch("resource_management.libraries.functions.conf_select.select")
   @patch("os.symlink")
@@ -134,11 +137,12 @@ class TestHookAfterInstall(RMFTestCase):
     self.executeScript("2.0.6/hooks/after-INSTALL/scripts/hook.py",
                        classname="AfterInstallHook",
                        command="hook",
-                       config_dict = json_content)
+                       config_dict = json_content,
+                       config_overrides = self.CONFIG_OVERRIDES)
 
 
-    self.assertResourceCalled('Execute', 'ambari-sudo.sh /usr/bin/hdp-select set all `ambari-python-wrap /usr/bin/hdp-select versions | grep ^2.3 | tail -1`',
-      only_if = 'ls -d /usr/hdp/2.3*')
+    self.assertResourceCalled('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'hive-server2', '2.3.0.0-1234'),
+      sudo = True)
 
     self.assertResourceCalled('XmlConfig', 'core-site.xml',
       owner = 'hdfs',
@@ -173,7 +177,7 @@ class TestHookAfterInstall(RMFTestCase):
     self.assertNoMoreResources()
 
 
-  @patch("shared_initialization.load_version", new = MagicMock(return_value="2.3.0.0-1243"))
+  @patch("shared_initialization.load_version", new = MagicMock(return_value="2.3.0.0-1234"))
   @patch("resource_management.libraries.functions.conf_select.create")
   @patch("resource_management.libraries.functions.conf_select.select")
   @patch("os.symlink")
@@ -207,12 +211,13 @@ class TestHookAfterInstall(RMFTestCase):
     self.executeScript("2.0.6/hooks/after-INSTALL/scripts/hook.py",
       classname="AfterInstallHook",
       command="hook",
-      config_dict = json_content)
+      config_dict = json_content,
+      config_overrides = self.CONFIG_OVERRIDES)
 
-    self.assertResourceCalled('Execute', 'ambari-sudo.sh /usr/bin/hdp-select set all `ambari-python-wrap /usr/bin/hdp-select versions | grep ^2.3.0.0-1234 | tail -1`',
-      only_if = 'ls -d /usr/hdp/2.3.0.0-1234*')
+    self.assertResourceCalled('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'hive-server2', '2.3.0.0-1234'),
+      sudo = True)
 
-  @patch("shared_initialization.load_version", new = MagicMock(return_value="2.3.0.0-1243"))
+  @patch("shared_initialization.load_version", new = MagicMock(return_value="2.3.0.0-1234"))
   @patch("resource_management.libraries.functions.conf_select.create")
   @patch("resource_management.libraries.functions.conf_select.select")
   @patch("os.symlink")
@@ -236,7 +241,8 @@ class TestHookAfterInstall(RMFTestCase):
     self.executeScript("2.0.6/hooks/after-INSTALL/scripts/hook.py",
                        classname="AfterInstallHook",
                        command="hook",
-                       config_dict = json_content)
+                       config_dict = json_content,
+                       config_overrides = self.CONFIG_OVERRIDES)
 
     # same assertions as test_hook_default_conf_select, but skip hdp-select set all
 
@@ -274,7 +280,7 @@ class TestHookAfterInstall(RMFTestCase):
 
 
   @patch("resource_management.core.Logger.warning")
-  @patch("shared_initialization.load_version", new = MagicMock(return_value="2.3.0.0-1243"))
+  @patch("shared_initialization.load_version", new = MagicMock(return_value="2.3.0.0-1234"))
   @patch("resource_management.libraries.functions.conf_select.create")
   @patch("resource_management.libraries.functions.conf_select.select")
   @patch("os.symlink")
@@ -303,6 +309,7 @@ class TestHookAfterInstall(RMFTestCase):
     self.executeScript("2.0.6/hooks/after-INSTALL/scripts/hook.py",
                        classname="AfterInstallHook",
                        command="hook",
-                       config_dict = json_content)
+                       config_dict = json_content,
+                       config_overrides = self.CONFIG_OVERRIDES)
 
-    logger_warning_mock.assert_any_call('Skipping running stack-selector-tool for stack 2.3.0.0-1234 as its a sys_prepped host. This may cause symlink pointers not to be created for HDP componets installed later on top of an already sys_prepped host.')
+    logger_warning_mock.assert_any_call('Skipping running stack-selector-tool becase this is a sys_prepped host. This may cause symlink pointers not to be created for HDP componets installed later on top of an already sys_prepped host.')
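
The recurring change in the test above, repeated throughout this patch, is a class-level CONFIG_OVERRIDES dict ({"serviceName": ..., "role": ...}) passed to executeScript as config_overrides. A minimal, standalone sketch of that merge step follows; apply_config_overrides and the sample command JSON are illustrative only, not the Ambari test harness API:

    import json

    def apply_config_overrides(config_dict, config_overrides):
        """Return a copy of the command JSON with top-level overrides applied."""
        merged = dict(config_dict)
        merged.update(config_overrides or {})
        return merged

    if __name__ == "__main__":
        command_json = {"roleCommand": "CUSTOM_COMMAND",
                        "configurations": {"cluster-env": {}}}
        overrides = {"serviceName": "HIVE", "role": "HIVE_SERVER"}
        print(json.dumps(apply_config_overrides(command_json, overrides), indent=2))

With serviceName and role present in the command JSON, the hook can resolve the stack-select packages for that specific component instead of shelling out to 'hdp-select set all', which is why the expected Execute above becomes a per-package ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'hive-server2', <version>) call with sudo = True.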

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/test/python/stacks/2.1/FALCON/test_falcon_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.1/FALCON/test_falcon_client.py b/ambari-server/src/test/python/stacks/2.1/FALCON/test_falcon_client.py
index b9747a2..dcc649d 100644
--- a/ambari-server/src/test/python/stacks/2.1/FALCON/test_falcon_client.py
+++ b/ambari-server/src/test/python/stacks/2.1/FALCON/test_falcon_client.py
@@ -27,6 +27,8 @@ class TestFalconClient(RMFTestCase):
   COMMON_SERVICES_PACKAGE_DIR = "FALCON/0.5.0.2.1/package"
   STACK_VERSION = "2.1"
 
+  CONFIG_OVERRIDES = {"serviceName":"FALCON", "role":"FALCON_CLIENT"}
+
   def test_configure_default(self):
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/falcon_client.py",
                        classname="FalconClient",
@@ -101,6 +103,7 @@ class TestFalconClient(RMFTestCase):
                        classname = "FalconClient",
                        command = "pre_upgrade_restart",
                        config_dict = json_content,
+                       config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES)
     self.assertResourceCalled('Execute',
@@ -120,6 +123,7 @@ class TestFalconClient(RMFTestCase):
                        classname = "FalconClient",
                        command = "pre_upgrade_restart",
                        config_dict = json_content,
+                       config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
                        call_mocks = [(0, None, ''), (0, None)],

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/test/python/stacks/2.1/FALCON/test_falcon_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.1/FALCON/test_falcon_server.py b/ambari-server/src/test/python/stacks/2.1/FALCON/test_falcon_server.py
index 44da365..a7e6a1e 100644
--- a/ambari-server/src/test/python/stacks/2.1/FALCON/test_falcon_server.py
+++ b/ambari-server/src/test/python/stacks/2.1/FALCON/test_falcon_server.py
@@ -33,6 +33,8 @@ class TestFalconServer(RMFTestCase):
   UPGRADE_STACK_VERSION = "2.2"
   DEFAULT_IMMUTABLE_PATHS = ['/apps/hive/warehouse', '/apps/falcon', '/mr-history/done', '/app-logs', '/tmp']
 
+  CONFIG_OVERRIDES = {"serviceName":"FALCON", "role":"FALCON_SERVER"}
+
   def test_start_default(self):
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/falcon_server.py",
       classname="FalconServer",
@@ -225,6 +227,7 @@ class TestFalconServer(RMFTestCase):
 
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/falcon_server.py",
      classname = "FalconServer", command = "restart", config_file = "falcon-upgrade.json",
+     config_overrides = self.CONFIG_OVERRIDES,
      stack_version = self.UPGRADE_STACK_VERSION,
      target = RMFTestCase.TARGET_COMMON_SERVICES )
 
@@ -427,6 +430,7 @@ class TestFalconServer(RMFTestCase):
                        classname = "FalconServer",
                        command = "pre_upgrade_restart",
                        config_dict = json_content,
+                       config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES)
     self.assertResourceCalled('Execute',
@@ -463,6 +467,7 @@ class TestFalconServer(RMFTestCase):
                        classname = "FalconServer",
                        command = "pre_upgrade_restart",
                        config_dict = json_content,
+                       config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
                        call_mocks = [(0, None, ''), (0, None)],

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/test/python/stacks/2.1/HIVE/test_hive_metastore.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.1/HIVE/test_hive_metastore.py b/ambari-server/src/test/python/stacks/2.1/HIVE/test_hive_metastore.py
index 3556e34..796b107 100644
--- a/ambari-server/src/test/python/stacks/2.1/HIVE/test_hive_metastore.py
+++ b/ambari-server/src/test/python/stacks/2.1/HIVE/test_hive_metastore.py
@@ -33,6 +33,8 @@ class TestHiveMetastore(RMFTestCase):
   COMMON_SERVICES_PACKAGE_DIR = "HIVE/0.12.0.2.0/package"
   STACK_VERSION = "2.0.6"
 
+  CONFIG_OVERRIDES = { "serviceName" : "HIVE", "role" : "HIVE_METASTORE" }
+
   def test_configure_default(self):
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_metastore.py",
                        classname = "HiveMetastore",
@@ -417,6 +419,7 @@ class TestHiveMetastore(RMFTestCase):
                        classname = "HiveMetastore",
                        command = "pre_upgrade_restart",
                        config_dict = json_content,
+                       config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES)
     self.assertResourceCalled('Execute',
@@ -438,6 +441,7 @@ class TestHiveMetastore(RMFTestCase):
                        classname = "HiveMetastore",
                        command = "pre_upgrade_restart",
                        config_dict = json_content,
+                       config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
                        call_mocks = [(0, None, ''), (0, None)],
@@ -552,6 +556,7 @@ class TestHiveMetastore(RMFTestCase):
       classname = "HiveMetastore",
       command = "pre_upgrade_restart",
       config_dict = json_content,
+      config_overrides = self.CONFIG_OVERRIDES,
       stack_version = self.STACK_VERSION,
       target = RMFTestCase.TARGET_COMMON_SERVICES,
       call_mocks = [(0, None, ''), (0, None)],
@@ -571,7 +576,7 @@ class TestHiveMetastore(RMFTestCase):
     self.assertResourceCalledIgnoreEarlier('Directory', '/var/lib/hive', owner = 'hive', group = 'hadoop',
       mode = 0755, create_parents = True, cd_access = 'a')
 
-    self.assertResourceCalled('Execute', ('rm', '-f', '/usr/hdp/current/hive-server2/lib/ojdbc6.jar'),
+    self.assertResourceCalled('Execute', ('rm', '-f', '/usr/hdp/current/hive-metastore/lib/ojdbc6.jar'),
         path = ['/bin', '/usr/bin/'],
         sudo = True)
 
@@ -708,6 +713,7 @@ class TestHiveMetastore(RMFTestCase):
                        classname = "HiveMetastore",
                        command = "pre_upgrade_restart",
                        config_dict = json_content,
+                       config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
                        call_mocks = [(0, None, ''), (0, None)],
@@ -724,7 +730,7 @@ class TestHiveMetastore(RMFTestCase):
       mode = 0755, create_parents = True, cd_access = 'a')
 
     self.assertResourceCalled('Execute',
-                              ('rm', '-f', '/usr/hdp/current/hive-server2/lib/ojdbc6.jar'),
+                              ('rm', '-f', '/usr/hdp/current/hive-metastore/lib/ojdbc6.jar'),
                               path=["/bin", "/usr/bin/"],
                               sudo = True)
 
@@ -738,41 +744,41 @@ class TestHiveMetastore(RMFTestCase):
                               sudo = True)
 
     self.assertResourceCalled('Execute',
-                              ('yes | ambari-sudo.sh cp /tmp/sqla-client-jdbc/java/* /usr/hdp/current/hive-server2/lib'))
+                              ('yes | ambari-sudo.sh cp /tmp/sqla-client-jdbc/java/* /usr/hdp/current/hive-metastore/lib'))
 
     self.assertResourceCalled('Directory',
-                              '/usr/hdp/current/hive-server2/lib/native/lib64',
+                              '/usr/hdp/current/hive-metastore/lib/native/lib64',
                               create_parents = True)
 
     self.assertResourceCalled('Execute',
-                              ('yes | ambari-sudo.sh cp /tmp/sqla-client-jdbc/native/lib64/* /usr/hdp/current/hive-server2/lib/native/lib64'))
+                              ('yes | ambari-sudo.sh cp /tmp/sqla-client-jdbc/native/lib64/* /usr/hdp/current/hive-metastore/lib/native/lib64'))
 
     self.assertResourceCalled('Execute',
-                              ('ambari-sudo.sh chown -R hive:hadoop /usr/hdp/current/hive-server2/lib/*'))
+                              ('ambari-sudo.sh chown -R hive:hadoop /usr/hdp/current/hive-metastore/lib/*'))
 
     self.assertResourceCalled('File', '/usr/hdp/2.3.0.0-1234/hive/lib/sqla-client-jdbc.tar.gz',
                               mode = 0644,
                               )
 
     self.assertResourceCalled('Execute',
-                              ('yes | ambari-sudo.sh cp /usr/hdp/current/hive-server2/lib/*.jar /usr/hdp/2.3.0.0-1234/hive/lib'))
+                              ('yes | ambari-sudo.sh cp /usr/hdp/current/hive-metastore/lib/*.jar /usr/hdp/2.3.0.0-1234/hive/lib'))
 
     self.assertResourceCalled('Directory',
                               '/usr/hdp/2.3.0.0-1234/hive/lib/native/lib64',
                               create_parents = True)
 
     self.assertResourceCalled('Execute',
-                              ('yes | ambari-sudo.sh cp /usr/hdp/current/hive-server2/lib/native/lib64/* /usr/hdp/2.3.0.0-1234/hive/lib/native/lib64'))
+                              ('yes | ambari-sudo.sh cp /usr/hdp/current/hive-metastore/lib/native/lib64/* /usr/hdp/2.3.0.0-1234/hive/lib/native/lib64'))
 
     self.assertResourceCalled('Execute',
-                              ('ambari-sudo.sh chown -R hive:hadoop /usr/hdp/current/hive-server2/lib/*'))
+                              ('ambari-sudo.sh chown -R hive:hadoop /usr/hdp/current/hive-metastore/lib/*'))
 
     self.assertResourceCalled('File', '/usr/hdp/2.3.0.0-1234/hive/lib/sqla-client-jdbc.tar.gz',
                               mode = 0644,
                               )
 
     self.assertResourceCalled('Execute', "/usr/hdp/2.3.0.0-1234/hive/bin/schematool -dbType sqlanywhere -upgradeSchema",
-                              logoutput = True, environment = {'HIVE_CONF_DIR': '/usr/hdp/current/hive-server2/conf/conf.server'},
+                              logoutput = True, environment = {'HIVE_CONF_DIR': '/usr/hdp/current/hive-metastore/conf/conf.server'},
                               tries = 1, user = 'hive')
 
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/test/python/stacks/2.1/STORM/test_storm_drpc_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.1/STORM/test_storm_drpc_server.py b/ambari-server/src/test/python/stacks/2.1/STORM/test_storm_drpc_server.py
index f208d3a..608b156 100644
--- a/ambari-server/src/test/python/stacks/2.1/STORM/test_storm_drpc_server.py
+++ b/ambari-server/src/test/python/stacks/2.1/STORM/test_storm_drpc_server.py
@@ -27,6 +27,8 @@ from test_storm_base import TestStormBase
 @patch("resource_management.libraries.functions.get_user_call_output.get_user_call_output", new=MagicMock(return_value=(0, '123', '')))
 class TestStormDrpcServer(TestStormBase):
 
+  CONFIG_OVERRIDES = {"serviceName":"STORM", "role":"DRPC_SERVER"}
+
   def test_configure_default(self):
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/drpc_server.py",
                        classname = "DrpcServer",
@@ -144,6 +146,7 @@ class TestStormDrpcServer(TestStormBase):
                      classname = "DrpcServer",
                      command = "pre_upgrade_restart",
                      config_file="default.json",
+                     config_overrides = self.CONFIG_OVERRIDES,
                      stack_version = self.STACK_VERSION,
                      target = RMFTestCase.TARGET_COMMON_SERVICES)
 
@@ -161,6 +164,7 @@ class TestStormDrpcServer(TestStormBase):
                      classname = "DrpcServer",
                      command = "pre_upgrade_restart",
                      config_dict = json_content,
+                     config_overrides = self.CONFIG_OVERRIDES,
                      stack_version = self.STACK_VERSION,
                      target = RMFTestCase.TARGET_COMMON_SERVICES,
                      call_mocks = [(0, None, ''), (0, None)],

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/test/python/stacks/2.1/STORM/test_storm_nimbus.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.1/STORM/test_storm_nimbus.py b/ambari-server/src/test/python/stacks/2.1/STORM/test_storm_nimbus.py
index fd25126..ab5da22 100644
--- a/ambari-server/src/test/python/stacks/2.1/STORM/test_storm_nimbus.py
+++ b/ambari-server/src/test/python/stacks/2.1/STORM/test_storm_nimbus.py
@@ -26,6 +26,7 @@ from test_storm_base import TestStormBase
 
 @patch("resource_management.libraries.functions.get_user_call_output.get_user_call_output", new=MagicMock(return_value=(0, '123', '')))
 class TestStormNimbus(TestStormBase):
+  CONFIG_OVERRIDES = {"serviceName":"STORM", "role":"NIMBUS"}
 
   def test_configure_default(self):
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/nimbus.py",
@@ -282,6 +283,7 @@ class TestStormNimbus(TestStormBase):
                        classname = "Nimbus",
                        command = "pre_upgrade_restart",
                        config_file="default.json",
+                       config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES)
 
@@ -300,6 +302,7 @@ class TestStormNimbus(TestStormBase):
                      classname = "Nimbus",
                      command = "pre_upgrade_restart",
                      config_dict = json_content,
+                     config_overrides = self.CONFIG_OVERRIDES,
                      stack_version = self.STACK_VERSION,
                      target = RMFTestCase.TARGET_COMMON_SERVICES,
                      call_mocks = [(0, None, ''), (0, None)],

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/test/python/stacks/2.1/STORM/test_storm_nimbus_prod.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.1/STORM/test_storm_nimbus_prod.py b/ambari-server/src/test/python/stacks/2.1/STORM/test_storm_nimbus_prod.py
index 850a98b..efa50c8 100644
--- a/ambari-server/src/test/python/stacks/2.1/STORM/test_storm_nimbus_prod.py
+++ b/ambari-server/src/test/python/stacks/2.1/STORM/test_storm_nimbus_prod.py
@@ -25,6 +25,7 @@ import resource_management.core.source
 from test_storm_base import TestStormBase
 
 class TestStormNimbus(TestStormBase):
+  CONFIG_OVERRIDES = {"serviceName":"STORM", "role":"NIMBUS"}
 
   def test_configure_default(self):
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/nimbus_prod.py",
@@ -117,6 +118,7 @@ class TestStormNimbus(TestStormBase):
                        classname = "Nimbus",
                        command = "pre_upgrade_restart",
                        config_file="default.json",
+                       config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES)
 
@@ -136,6 +138,7 @@ class TestStormNimbus(TestStormBase):
                      classname = "Nimbus",
                      command = "pre_upgrade_restart",
                      config_dict = json_content,
+                     config_overrides = self.CONFIG_OVERRIDES,
                      stack_version = self.STACK_VERSION,
                      target = RMFTestCase.TARGET_COMMON_SERVICES,
                      call_mocks = [(0, None, ''), (0, None, '')],

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/test/python/stacks/2.1/STORM/test_storm_supervisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.1/STORM/test_storm_supervisor.py b/ambari-server/src/test/python/stacks/2.1/STORM/test_storm_supervisor.py
index b7ec7ba..116597b 100644
--- a/ambari-server/src/test/python/stacks/2.1/STORM/test_storm_supervisor.py
+++ b/ambari-server/src/test/python/stacks/2.1/STORM/test_storm_supervisor.py
@@ -27,6 +27,8 @@ from test_storm_base import TestStormBase
 @patch("resource_management.libraries.functions.get_user_call_output.get_user_call_output", new=MagicMock(return_value=(0, '123', '')))
 class TestStormSupervisor(TestStormBase):
 
+  CONFIG_OVERRIDES = {"serviceName":"STORM", "role":"SUPERVISOR"}
+
   def test_configure_default(self):
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/supervisor.py",
                        classname = "Supervisor",
@@ -182,6 +184,7 @@ class TestStormSupervisor(TestStormBase):
                        classname = "Supervisor",
                        command = "pre_upgrade_restart",
                        config_file="default.json",
+                       config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES)
 
@@ -200,6 +203,7 @@ class TestStormSupervisor(TestStormBase):
                      classname = "Supervisor",
                      command = "pre_upgrade_restart",
                      config_dict = json_content,
+                     config_overrides = self.CONFIG_OVERRIDES,
                      stack_version = self.STACK_VERSION,
                      target = RMFTestCase.TARGET_COMMON_SERVICES,
                      call_mocks = [(0, None, ''), (0, None)],

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/test/python/stacks/2.1/STORM/test_storm_supervisor_prod.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.1/STORM/test_storm_supervisor_prod.py b/ambari-server/src/test/python/stacks/2.1/STORM/test_storm_supervisor_prod.py
index 21c5f2c..46221a9 100644
--- a/ambari-server/src/test/python/stacks/2.1/STORM/test_storm_supervisor_prod.py
+++ b/ambari-server/src/test/python/stacks/2.1/STORM/test_storm_supervisor_prod.py
@@ -27,6 +27,8 @@ from test_storm_base import TestStormBase
 @patch("resource_management.libraries.functions.get_user_call_output.get_user_call_output", new=MagicMock(return_value=(0, '123', '')))
 class TestStormSupervisor(TestStormBase):
 
+  CONFIG_OVERRIDES = {"serviceName":"STORM", "role":"SUPERVISOR"}
+
   def test_configure_default(self):
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/supervisor_prod.py",
                        classname = "Supervisor",
@@ -159,6 +161,7 @@ class TestStormSupervisor(TestStormBase):
                        classname = "Supervisor",
                        command = "pre_upgrade_restart",
                        config_file="default.json",
+                       config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES)
 
@@ -178,6 +181,7 @@ class TestStormSupervisor(TestStormBase):
                      classname = "Supervisor",
                      command = "pre_upgrade_restart",
                      config_dict = json_content,
+                     config_overrides = self.CONFIG_OVERRIDES,
                      stack_version = self.STACK_VERSION,
                      target = RMFTestCase.TARGET_COMMON_SERVICES,
                      call_mocks = [(0, None, ''), (0, None)],

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/test/python/stacks/2.1/STORM/test_storm_ui_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.1/STORM/test_storm_ui_server.py b/ambari-server/src/test/python/stacks/2.1/STORM/test_storm_ui_server.py
index 3ac38c7..985c754 100644
--- a/ambari-server/src/test/python/stacks/2.1/STORM/test_storm_ui_server.py
+++ b/ambari-server/src/test/python/stacks/2.1/STORM/test_storm_ui_server.py
@@ -27,6 +27,8 @@ from test_storm_base import TestStormBase
 @patch("resource_management.libraries.functions.get_user_call_output.get_user_call_output", new=MagicMock(return_value=(0, '123', '')))
 class TestStormUiServer(TestStormBase):
 
+  CONFIG_OVERRIDES = {"serviceName":"STORM", "role":"STORM_UI_SERVER"}
+
   def test_configure_default(self):
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/ui_server.py",
                        classname = "UiServer",
@@ -209,10 +211,10 @@ class TestStormUiServer(TestStormBase):
     )
     self.assertNoMoreResources()
 
-  def test_pre_upgrade_restart(self):
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/ui_server.py",
                        classname = "UiServer",
                        command = "pre_upgrade_restart",
+                       config_overrides = self.CONFIG_OVERRIDES,
                        config_file="default.json",
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES)
@@ -231,6 +233,7 @@ class TestStormUiServer(TestStormBase):
                      classname = "UiServer",
                      command = "pre_upgrade_restart",
                      config_dict = json_content,
+                     config_overrides = self.CONFIG_OVERRIDES,
                      stack_version = self.STACK_VERSION,
                      target = RMFTestCase.TARGET_COMMON_SERVICES,
                      call_mocks = [(0, None, ''), (0, None)],

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/test/python/stacks/2.1/TEZ/test_tez_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.1/TEZ/test_tez_client.py b/ambari-server/src/test/python/stacks/2.1/TEZ/test_tez_client.py
index e63090a..bd7567a 100644
--- a/ambari-server/src/test/python/stacks/2.1/TEZ/test_tez_client.py
+++ b/ambari-server/src/test/python/stacks/2.1/TEZ/test_tez_client.py
@@ -27,6 +27,8 @@ class TestTezClient(RMFTestCase):
   COMMON_SERVICES_PACKAGE_DIR = "TEZ/0.4.0.2.1/package"
   STACK_VERSION = "2.1"
 
+  CONFIG_OVERRIDES = {"serviceName":"TEZ", "role":"TEZ_CLIENT"}
+
   def test_configure_default(self):
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/tez_client.py",
                        classname = "TezClient",
@@ -70,6 +72,7 @@ class TestTezClient(RMFTestCase):
                        classname = "TezClient",
                        command = "restart",
                        config_file="client-upgrade.json",
+                       config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES)
 
@@ -84,6 +87,7 @@ class TestTezClient(RMFTestCase):
                        classname = "TezClient",
                        command = "restart",
                        config_file="client-upgrade.json",
+                       config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES)
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/test/python/stacks/2.1/YARN/test_apptimelineserver.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.1/YARN/test_apptimelineserver.py b/ambari-server/src/test/python/stacks/2.1/YARN/test_apptimelineserver.py
index 2ebb315..8cd996e 100644
--- a/ambari-server/src/test/python/stacks/2.1/YARN/test_apptimelineserver.py
+++ b/ambari-server/src/test/python/stacks/2.1/YARN/test_apptimelineserver.py
@@ -34,6 +34,8 @@ class TestAppTimelineServer(RMFTestCase):
   STACK_VERSION = "2.0.6"
   DEFAULT_IMMUTABLE_PATHS = ['/apps/hive/warehouse', '/apps/falcon', '/mr-history/done', '/app-logs', '/tmp']
 
+  CONFIG_OVERRIDES = {"serviceName":"YARN", "role":"APP_TIMELINE_SERVER"}
+
   def test_configure_default(self):
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/application_timeline_server.py",
                        classname="ApplicationTimelineServer",
@@ -294,6 +296,7 @@ class TestAppTimelineServer(RMFTestCase):
                        classname = "ApplicationTimelineServer",
                        command = "pre_upgrade_restart",
                        config_dict = json_content,
+                       config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
                        call_mocks = [(0, None, ''), (0, None, '')],

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/test/python/stacks/2.2/ACCUMULO/test_accumulo_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.2/ACCUMULO/test_accumulo_client.py b/ambari-server/src/test/python/stacks/2.2/ACCUMULO/test_accumulo_client.py
index e3430e5..3ffd0a9 100644
--- a/ambari-server/src/test/python/stacks/2.2/ACCUMULO/test_accumulo_client.py
+++ b/ambari-server/src/test/python/stacks/2.2/ACCUMULO/test_accumulo_client.py
@@ -29,6 +29,8 @@ class TestAccumuloClient(RMFTestCase):
   COMMON_SERVICES_PACKAGE_DIR = "ACCUMULO/1.6.1.2.2.0/package"
   STACK_VERSION = "2.1"
 
+  CONFIG_OVERRIDES = {"serviceName":"ACCUMULO", "role":"ACCUMULO_CLIENT"}
+
   def test_pre_upgrade_restart(self):
     config_file = self.get_src_folder() + "/test/python/stacks/2.2/configs/default.json"
     with open(config_file, "r") as f:
@@ -41,6 +43,7 @@ class TestAccumuloClient(RMFTestCase):
       classname = "AccumuloClient",
       command = "pre_upgrade_restart",
       config_dict = json_content,
+      config_overrides = self.CONFIG_OVERRIDES,
       stack_version = self.STACK_VERSION,
       target = RMFTestCase.TARGET_COMMON_SERVICES)
 
@@ -62,6 +65,7 @@ class TestAccumuloClient(RMFTestCase):
       classname = "AccumuloClient",
       command = "pre_upgrade_restart",
       config_dict = json_content,
+      config_overrides = self.CONFIG_OVERRIDES,
       stack_version = self.STACK_VERSION,
       target = RMFTestCase.TARGET_COMMON_SERVICES,
       call_mocks = [(0, None, ''), (0, None)],

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/test/python/stacks/2.2/KAFKA/test_kafka_broker.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.2/KAFKA/test_kafka_broker.py b/ambari-server/src/test/python/stacks/2.2/KAFKA/test_kafka_broker.py
index d6afa50..477830e 100644
--- a/ambari-server/src/test/python/stacks/2.2/KAFKA/test_kafka_broker.py
+++ b/ambari-server/src/test/python/stacks/2.2/KAFKA/test_kafka_broker.py
@@ -28,6 +28,8 @@ class TestKafkaBroker(RMFTestCase):
   COMMON_SERVICES_PACKAGE_DIR = "KAFKA/0.8.1/package"
   STACK_VERSION = "2.2"
 
+  CONFIG_OVERRIDES = {"serviceName":"KAFKA", "role":"KAFKA_BROKER"}
+
   def test_configure_default(self):
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/kafka_broker.py",
                          classname = "KafkaBroker",
@@ -133,6 +135,7 @@ class TestKafkaBroker(RMFTestCase):
                        classname = "KafkaBroker",
                        command = "pre_upgrade_restart",
                        config_dict = json_content,
+                       config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES)
     self.assertResourceCalled('Execute',
@@ -152,6 +155,7 @@ class TestKafkaBroker(RMFTestCase):
                        classname = "KafkaBroker",
                        command = "pre_upgrade_restart",
                        config_dict = json_content,
+                       config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
                        call_mocks = [(0, None, ''), (0, None)],

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/test/python/stacks/2.2/SLIDER/test_slider_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.2/SLIDER/test_slider_client.py b/ambari-server/src/test/python/stacks/2.2/SLIDER/test_slider_client.py
index 638f92e..d4dcc48 100644
--- a/ambari-server/src/test/python/stacks/2.2/SLIDER/test_slider_client.py
+++ b/ambari-server/src/test/python/stacks/2.2/SLIDER/test_slider_client.py
@@ -27,6 +27,8 @@ class TestSliderClient(RMFTestCase):
   COMMON_SERVICES_PACKAGE_DIR = "SLIDER/0.60.0.2.2/package"
   STACK_VERSION = "2.2"
 
+  CONFIG_OVERRIDES = {"serviceName":"SLIDER", "role":"SLIDER"}
+
   def test_configure_default(self):
     self.maxDiff = None
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/slider_client.py",
@@ -168,6 +170,7 @@ class TestSliderClient(RMFTestCase):
                        classname = "SliderClient",
                        command = "pre_upgrade_restart",
                        config_file="default.json",
+                       config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES)
 
@@ -187,6 +190,7 @@ class TestSliderClient(RMFTestCase):
                        classname = "SliderClient",
                        command = "pre_upgrade_restart",
                        config_dict = json_content,
+                       config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
                        call_mocks = [(0, None, ''), (0, None, ''), (0, None, ''), (0, None, '')],
@@ -194,6 +198,8 @@ class TestSliderClient(RMFTestCase):
 
     self.assertResourceCalledIgnoreEarlier("Execute", ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'slider-client', '2.3.0.0-1234'), sudo=True)
     self.assertResourceCalledIgnoreEarlier("Execute", ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'hadoop-client', '2.3.0.0-1234'), sudo=True)
+    self.assertResourceCalled('Link', '/etc/hadoop/conf', to='/usr/hdp/current/hadoop-client/conf')
+
     self.assertNoMoreResources()
 
     self.assertEquals(2, mocks_dict['call'].call_count)
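
Beyond the per-package hdp-select calls, the slider client test now also expects a Link resource re-pointing /etc/hadoop/conf at the hadoop-client conf directory. The following is a sketch only, under the assumption that the link may or may not already exist; the helper itself is hypothetical, not the resource implementation:

    import os

    def relink_hadoop_conf(link="/etc/hadoop/conf",
                           target="/usr/hdp/current/hadoop-client/conf"):
        """Point the conf symlink at the hadoop-client conf directory."""
        if os.path.islink(link):
            if os.readlink(link) == target:
                return
            os.remove(link)
        os.symlink(target, link)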

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/test/python/stacks/2.2/SPARK/test_job_history_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.2/SPARK/test_job_history_server.py b/ambari-server/src/test/python/stacks/2.2/SPARK/test_job_history_server.py
index 186ff72..b87f8fc 100644
--- a/ambari-server/src/test/python/stacks/2.2/SPARK/test_job_history_server.py
+++ b/ambari-server/src/test/python/stacks/2.2/SPARK/test_job_history_server.py
@@ -30,6 +30,8 @@ class TestJobHistoryServer(RMFTestCase):
   STACK_VERSION = "2.2"
   DEFAULT_IMMUTABLE_PATHS = ['/apps/hive/warehouse', '/apps/falcon', '/mr-history/done', '/app-logs', '/tmp']
 
+  CONFIG_OVERRIDES = {"serviceName":"SPARK", "role":"SPARK_JOBHISTORYSERVER"}
+
   @patch("resource_management.libraries.functions.copy_tarball.copy_to_hdfs")
   def test_configure_default(self, copy_to_hdfs_mock):
     copy_to_hdfs_mock = True
@@ -334,6 +336,7 @@ class TestJobHistoryServer(RMFTestCase):
                        classname = "JobHistoryServer",
                        command = "pre_upgrade_restart",
                        config_dict = json_content,
+                       config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
                        call_mocks = [(0, None, ''), (0, None)],

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/test/python/stacks/2.2/SPARK/test_spark_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.2/SPARK/test_spark_client.py b/ambari-server/src/test/python/stacks/2.2/SPARK/test_spark_client.py
index 0921edc..3fe59ad 100644
--- a/ambari-server/src/test/python/stacks/2.2/SPARK/test_spark_client.py
+++ b/ambari-server/src/test/python/stacks/2.2/SPARK/test_spark_client.py
@@ -29,6 +29,8 @@ class TestSparkClient(RMFTestCase):
   COMMON_SERVICES_PACKAGE_DIR = "SPARK/1.2.1/package"
   STACK_VERSION = "2.2"
 
+  CONFIG_OVERRIDES = {"serviceName":"SPARK", "role":"SPARK_CLIENT"}
+
   def test_configure_default(self):
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/spark_client.py",
                    classname = "SparkClient",
@@ -163,6 +165,7 @@ class TestSparkClient(RMFTestCase):
                        classname = "SparkClient",
                        command = "pre_upgrade_restart",
                        config_dict = json_content,
+                       config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
                        call_mocks = [(0, None, ''), (0, None)],
@@ -192,6 +195,7 @@ class TestSparkClient(RMFTestCase):
                        classname = "SparkClient",
                        command = "stack_upgrade_save_new_config",
                        config_dict = json_content,
+                       config_overrides =  self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
                        call_mocks = [(0, None, ''), (0, None)],

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/test/python/stacks/2.3/MAHOUT/test_mahout_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.3/MAHOUT/test_mahout_client.py b/ambari-server/src/test/python/stacks/2.3/MAHOUT/test_mahout_client.py
index 17be97f..9286b7a 100644
--- a/ambari-server/src/test/python/stacks/2.3/MAHOUT/test_mahout_client.py
+++ b/ambari-server/src/test/python/stacks/2.3/MAHOUT/test_mahout_client.py
@@ -29,6 +29,8 @@ class TestMahoutClient(RMFTestCase):
   COMMON_SERVICES_PACKAGE_DIR = "MAHOUT/1.0.0.2.3/package"
   STACK_VERSION = "2.3"
 
+  CONFIG_OVERRIDES = {"serviceName":"MAHOUT", "role":"MAHOUT"}
+
   def test_configure_default(self):
     self.executeScript(
       self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/mahout_client.py",
@@ -73,6 +75,7 @@ class TestMahoutClient(RMFTestCase):
       classname = "MahoutClient",
       command = "pre_upgrade_restart",
       config_dict = json_content,
+      config_overrides = self.CONFIG_OVERRIDES,
       stack_version = self.STACK_VERSION,
       target = RMFTestCase.TARGET_COMMON_SERVICES)
 
@@ -98,6 +101,7 @@ class TestMahoutClient(RMFTestCase):
       classname = "MahoutClient",
       command = "pre_upgrade_restart",
       config_dict = json_content,
+      config_overrides = self.CONFIG_OVERRIDES,
       stack_version = self.STACK_VERSION,
       target = RMFTestCase.TARGET_COMMON_SERVICES,
       call_mocks = itertools.cycle([(0, None, '')]),

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/test/python/stacks/2.3/SPARK/test_spark_thrift_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.3/SPARK/test_spark_thrift_server.py b/ambari-server/src/test/python/stacks/2.3/SPARK/test_spark_thrift_server.py
index e8e9320..ddd1ab3 100644
--- a/ambari-server/src/test/python/stacks/2.3/SPARK/test_spark_thrift_server.py
+++ b/ambari-server/src/test/python/stacks/2.3/SPARK/test_spark_thrift_server.py
@@ -30,6 +30,8 @@ class TestSparkThriftServer(RMFTestCase):
   STACK_VERSION = "2.3"
   DEFAULT_IMMUTABLE_PATHS = ['/apps/hive/warehouse', '/apps/falcon', '/mr-history/done', '/app-logs', '/tmp']
 
+  CONFIG_OVERRIDES = {"serviceName":"SPARK", "role":"SPARK_THRIFTSERVER"}
+
   @patch("resource_management.libraries.functions.copy_tarball.copy_to_hdfs")
   def test_configure_default(self, copy_to_hdfs_mock):
     copy_to_hdfs_mock = True
@@ -180,6 +182,7 @@ class TestSparkThriftServer(RMFTestCase):
                        classname = "SparkThriftServer",
                        command = "pre_upgrade_restart",
                        config_dict = json_content,
+                       config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
                        call_mocks = [(0, None, ''), (0, None)],

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/test/python/stacks/utils/RMFTestCase.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/utils/RMFTestCase.py b/ambari-server/src/test/python/stacks/utils/RMFTestCase.py
index b19920f..3612890 100644
--- a/ambari-server/src/test/python/stacks/utils/RMFTestCase.py
+++ b/ambari-server/src/test/python/stacks/utils/RMFTestCase.py
@@ -109,6 +109,9 @@ class RMFTestCase(TestCase):
     if "stack_features" not in self.config_dict["configurations"]["cluster-env"]:
       self.config_dict["configurations"]["cluster-env"]["stack_features"] = RMFTestCase.get_stack_features()
 
+    if "stack_select_packages" not in self.config_dict["configurations"]["cluster-env"]:
+      self.config_dict["configurations"]["cluster-env"]["stack_select_packages"] = RMFTestCase.get_stack_select_packages()
+
     if config_overrides:
       for key, value in config_overrides.iteritems():
         self.config_dict[key] = value
@@ -191,6 +194,15 @@ class RMFTestCase(TestCase):
       return f.read()
 
   @staticmethod
+  def get_stack_select_packages():
+    """
+    Read stack_select_packages config property from resources/stacks/HDP/2.0.6/properties/stack_features.json
+    """
+    stack_features_file = os.path.join(RMFTestCase.get_src_folder(), PATH_TO_STACKS, "2.0.6", "properties", "stack_select_packages.json")
+    with open(stack_features_file, "r") as f:
+      return f.read()
+
+  @staticmethod
   def _getStackTestsFolder():
     return os.path.join(RMFTestCase.get_src_folder(), PATH_TO_STACK_TESTS)
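
For context, the default-fill added to RMFTestCase above can be read as the following standalone sketch; the helper name and the properties_dir argument are illustrative, not the actual harness API. When a test's command JSON has no stack_select_packages entry in cluster-env, the harness loads it from a stack_select_packages.json properties file so scripts that call stack_select.select_packages have the mapping available:

    import os

    def ensure_stack_select_packages(config_dict, properties_dir):
        """Fill cluster-env/stack_select_packages from disk if a test did not set it."""
        cluster_env = config_dict["configurations"]["cluster-env"]
        if "stack_select_packages" not in cluster_env:
            path = os.path.join(properties_dir, "stack_select_packages.json")
            with open(path, "r") as f:
                cluster_env["stack_select_packages"] = f.read()
        return config_dict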
 


[3/5] ambari git commit: AMBARI-21580 - Replace Hard Coded stack-select Structures (jonathanhurley)

Posted by jo...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/OOZIE/package/scripts/oozie_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/OOZIE/package/scripts/oozie_server.py b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/OOZIE/package/scripts/oozie_server.py
index d6d6d37..11c8c8e 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/OOZIE/package/scripts/oozie_server.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/OOZIE/package/scripts/oozie_server.py
@@ -42,9 +42,6 @@ from oozie_service import oozie_service
 
 class OozieServer(Script):
 
-  def get_component_name(self):
-    return "oozie-server"
-
   def install(self, env):
     self.install_packages(env)
 
@@ -61,9 +58,8 @@ class OozieServer(Script):
       # This is required as both need to be pointing to new installed oozie version.
 
       # Sets the symlink : eg: /usr/hdp/current/oozie-client -> /usr/hdp/2.3.x.y-<version>/oozie
-      stack_select.select("oozie-client", params.version)
       # Sets the symlink : eg: /usr/hdp/current/oozie-server -> /usr/hdp/2.3.x.y-<version>/oozie
-      stack_select.select("oozie-server", params.version)
+      stack_select.select_packages(params.version)
 
     env.set_params(params)
     oozie(is_server=True)
@@ -181,7 +177,7 @@ class OozieServer(Script):
     oozie_server_upgrade.backup_configuration()
 
     conf_select.select(params.stack_name, "oozie", params.version)
-    stack_select.select("oozie-server", params.version)
+    stack_select.select_packages(params.version)
     #Execute(format("stack-select set oozie-server {version}"))
 
     oozie_server_upgrade.restore_configuration()
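
The pattern in this hunk repeats across the BigInsights scripts below: get_component_name() is dropped and each hard-coded stack_select.select("<component>", version) call collapses into a single stack_select.select_packages(params.version). A simplified stand-in, not the Ambari library itself, shows the shape of that call; the package names are examples and the real invocations run under the ambari-sudo wrapper:

    import subprocess

    def select_packages(version, packages, tool="/usr/bin/hdp-select"):
        """Run '<tool> set <package> <version>' once per declared package."""
        for package in packages:
            subprocess.check_call(["ambari-python-wrap", tool, "set", package, version])

    # For an Oozie server host the declared packages might be, for example:
    # select_packages("2.3.0.0-1234", ["oozie-server", "oozie-client"])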

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/PIG/package/scripts/pig_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/PIG/package/scripts/pig_client.py b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/PIG/package/scripts/pig_client.py
index bd08b56..96a48c1 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/PIG/package/scripts/pig_client.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/PIG/package/scripts/pig_client.py
@@ -39,9 +39,6 @@ class PigClient(Script):
 
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class PigClientLinux(PigClient):
-  def get_component_name(self):
-    return "hadoop-client"
-
   def pre_upgrade_restart(self, env, upgrade_type=None):
     import params
     env.set_params(params)
@@ -49,7 +46,7 @@ class PigClientLinux(PigClient):
     if params.version and compare_versions(format_stack_version(params.version), '4.0.0.0') >= 0:
       conf_select.select(params.stack_name, "pig", params.version)
       conf_select.select(params.stack_name, "hadoop", params.version)
-      stack_select.select("hadoop-client", params.version) # includes pig-client
+      stack_select.select_packages(params.version)
 
   def install(self, env):
     self.install_packages(env)

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SLIDER/package/scripts/slider_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SLIDER/package/scripts/slider_client.py b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SLIDER/package/scripts/slider_client.py
index 11e3cd3..e5c1483 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SLIDER/package/scripts/slider_client.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SLIDER/package/scripts/slider_client.py
@@ -26,23 +26,19 @@ from slider import slider
 
 class SliderClient(Script):
 
-  def get_component_name(self):
-    return "slider-client"
-
   def pre_upgrade_restart(self, env, upgrade_type=None):
     import params
     env.set_params(params)
 
     if params.version and compare_versions(format_stack_version(params.version), '4.0.0.0') >= 0:
       conf_select.select(params.stack_name, "slider", params.version)
-      stack_select.select("slider-client", params.version)
+      stack_select.select_packages(params.version)
       #Execute(format("stack-select set slider-client {version}"))
 
       # also set all of the hadoop clients since slider client is upgraded as
       # part of the final "CLIENTS" group and we need to ensure that
       # hadoop-client is also set
       conf_select.select(params.stack_name, "hadoop", params.version)
-      stack_select.select("hadoop-client", params.version)
       #Execute(format("stack-select set hadoop-client {version}"))
 
   def install(self, env):

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SOLR/package/scripts/solr_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SOLR/package/scripts/solr_server.py b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SOLR/package/scripts/solr_server.py
index 5cefc73..3605264 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SOLR/package/scripts/solr_server.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SOLR/package/scripts/solr_server.py
@@ -43,7 +43,7 @@ class SolrServer(Script):
     import params
     env.set_params(params)
     if params.version and compare_versions(format_stack_version(params.version), '4.1.0.0') >= 0:
-      stack_select.select("solr-server", params.version)
+      stack_select.select_packages(params.version)
 
       call_conf_select = True
       conf_dir = '/usr/iop/4.1.0.0/solr/conf'

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SPARK/package/scripts/job_history_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SPARK/package/scripts/job_history_server.py b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SPARK/package/scripts/job_history_server.py
index 439defe..654b88a 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SPARK/package/scripts/job_history_server.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SPARK/package/scripts/job_history_server.py
@@ -32,16 +32,13 @@ from spark import *
 
 class JobHistoryServer(Script):
 
-  def get_component_name(self):
-     return "spark-historyserver"
-
   def pre_upgrade_restart(self, env, upgrade_type=None):
     import params
 
     env.set_params(params)
     if params.version and compare_versions(format_stack_version(params.version), '4.0.0.0') >= 0:
       conf_select.select(params.stack_name, "spark", params.version)
-      stack_select.select("spark-historyserver", params.version)
+      stack_select.select_packages(params.version)
       #Execute(format("stack-select set spark-historyserver {version}"))
 
   def install(self, env):

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SPARK/package/scripts/spark_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SPARK/package/scripts/spark_client.py b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SPARK/package/scripts/spark_client.py
index 2345c63..7f2e7fc 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SPARK/package/scripts/spark_client.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SPARK/package/scripts/spark_client.py
@@ -31,16 +31,13 @@ from spark import spark
 
 class SparkClient(Script):
 
-  def get_component_name(self):
-    return "spark-client"
-
   def pre_upgrade_restart(self, env, upgrade_type=None):
     import params
 
     env.set_params(params)
     if params.version and compare_versions(format_stack_version(params.version), '4.0.0.0') >= 0:
       conf_select.select(params.stack_name, "spark", params.version)
-      stack_select.select("spark-client", params.version)
+      stack_select.select_packages(params.version)
       #Execute(format("stack-select set spark-client {version}"))
 
   def install(self, env):

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SPARK/package/scripts/spark_thrift_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SPARK/package/scripts/spark_thrift_server.py b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SPARK/package/scripts/spark_thrift_server.py
index 39e15d3..75908a7 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SPARK/package/scripts/spark_thrift_server.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SPARK/package/scripts/spark_thrift_server.py
@@ -33,17 +33,13 @@ from spark import *
 
 class ThriftServer(Script):
 
-  def get_component_name(self):
-    return "spark-thriftserver"
-
-
   def pre_upgrade_restart(self, env, upgrade_type=None):
     import params
 
     env.set_params(params)
     if params.version and compare_versions(format_stack_version(params.version), '4.0.0.0') >= 0:
       conf_select.select(params.stack_name, "spark", params.version)
-      stack_select.select("spark-thriftserver", params.version)
+      stack_select.select_packages(params.version)
 
   def install(self, env):
     self.install_packages(env)

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SQOOP/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SQOOP/package/scripts/service_check.py b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SQOOP/package/scripts/service_check.py
index b1f658d..211c917 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SQOOP/package/scripts/service_check.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SQOOP/package/scripts/service_check.py
@@ -24,9 +24,6 @@ from resource_management import *
 
 class SqoopServiceCheck(Script):
 
-  def get_component_name(self):
-    return "sqoop-server"
-
   def service_check(self, env):
     import params
     env.set_params(params)

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SQOOP/package/scripts/sqoop_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SQOOP/package/scripts/sqoop_client.py b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SQOOP/package/scripts/sqoop_client.py
index 56b7135..5e48aca 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SQOOP/package/scripts/sqoop_client.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SQOOP/package/scripts/sqoop_client.py
@@ -29,16 +29,13 @@ from sqoop import sqoop
 
 class SqoopClient(Script):
 
-  def get_component_name(self):
-    return "sqoop-client"
-
   def pre_rolling_restart(self, env):
     import params
     env.set_params(params)
 
     if params.version and compare_versions(format_stack_version(params.version), '4.0.0.0') >= 0:
       conf_select.select(params.stack_name, "sqoop", params.version)
-      stack_select.select("sqoop-client", params.version)
+      stack_select.select_packages(params.version)
       #Execute(format("stack-select set sqoop-client {version}"))
 
   def install(self, env):

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/YARN/package/scripts/application_timeline_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/YARN/package/scripts/application_timeline_server.py b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/YARN/package/scripts/application_timeline_server.py
index 5152cf9..125a65f 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/YARN/package/scripts/application_timeline_server.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/YARN/package/scripts/application_timeline_server.py
@@ -33,9 +33,6 @@ from service import service
 
 class ApplicationTimelineServer(Script):
 
-  def get_component_name(self):
-    return "hadoop-yarn-timelineserver"
-
   def install(self, env):
     self.install_packages(env)
     #self.configure(env)
@@ -52,7 +49,7 @@ class ApplicationTimelineServer(Script):
 
     if params.version and compare_versions(format_stack_version(params.version), '4.0.0.0') >= 0:
       conf_select.select(params.stack_name, "hadoop", params.version)
-      stack_select.select("hadoop-yarn-timelineserver", params.version)
+      stack_select.select_packages(params.version)
       #Execute(format("stack-select set hadoop-yarn-timelineserver {version}"))
 
   def start(self, env, upgrade_type=None):

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/YARN/package/scripts/historyserver.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/YARN/package/scripts/historyserver.py b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/YARN/package/scripts/historyserver.py
index 20635a2..d887fce 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/YARN/package/scripts/historyserver.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/YARN/package/scripts/historyserver.py
@@ -41,9 +41,6 @@ from ambari_commons.os_family_impl import OsFamilyImpl
 
 class HistoryServer(Script):
 
-  def get_component_name(self):
-    return "hadoop-mapreduce-historyserver"
-
   def install(self, env):
     self.install_packages(env)
 
@@ -59,7 +56,7 @@ class HistoryServer(Script):
 
     if params.version and compare_versions(format_stack_version(params.version), '4.0.0.0') >= 0:
       conf_select.select(params.stack_name, "hadoop", params.version)
-      stack_select.select("hadoop-mapreduce-historyserver", params.version)
+      stack_select.select_packages(params.version)
       # MC Hammer said, "Can't touch this"
       copy_to_hdfs("mapreduce", params.user_group, params.hdfs_user, skip=params.host_sys_prepped)
       copy_to_hdfs("slider", params.user_group, params.hdfs_user, skip=params.host_sys_prepped)

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/YARN/package/scripts/mapreduce2_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/YARN/package/scripts/mapreduce2_client.py b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/YARN/package/scripts/mapreduce2_client.py
index 77fd49e..fe64193 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/YARN/package/scripts/mapreduce2_client.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/YARN/package/scripts/mapreduce2_client.py
@@ -28,16 +28,13 @@ from yarn import yarn
 
 class MapReduce2Client(Script):
 
-  def get_component_name(self):
-    return "hadoop-client"
-
   def pre_upgrade_restart(self, env, upgrade_type=None):
     import params
     env.set_params(params)
 
     if params.version and compare_versions(format_stack_version(params.version), '4.0.0.0') >= 0:
       conf_select.select(params.stack_name, "hadoop", params.version)
-      stack_select.select("hadoop-client", params.version)
+      stack_select.select_packages(params.version)
       #Execute(format("stack-select set hadoop-client {version}"))
 
   def install(self, env):

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/YARN/package/scripts/nodemanager.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/YARN/package/scripts/nodemanager.py b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/YARN/package/scripts/nodemanager.py
index d185b7e..c048127 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/YARN/package/scripts/nodemanager.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/YARN/package/scripts/nodemanager.py
@@ -35,9 +35,6 @@ from service import service
 
 class Nodemanager(Script):
 
-  def get_component_name(self):
-    return "hadoop-yarn-nodemanager"
-
   def install(self, env):
     self.install_packages(env)
 
@@ -53,7 +50,7 @@ class Nodemanager(Script):
 
     if params.version and compare_versions(format_stack_version(params.version), '4.0.0.0') >= 0:
       conf_select.select(params.stack_name, "hadoop", params.version)
-      stack_select.select("hadoop-yarn-nodemanager", params.version)
+      stack_select.select_packages(params.version)
       #Execute(format("stack-select set hadoop-yarn-nodemanager {version}"))
 
   def start(self, env, upgrade_type=None):

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/YARN/package/scripts/resourcemanager.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/YARN/package/scripts/resourcemanager.py b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/YARN/package/scripts/resourcemanager.py
index 8e350b9..554c23d 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/YARN/package/scripts/resourcemanager.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/YARN/package/scripts/resourcemanager.py
@@ -33,9 +33,6 @@ from service import service
 
 class Resourcemanager(Script):
 
-  def get_component_name(self):
-    return "hadoop-yarn-resourcemanager"
-
   def install(self, env):
     self.install_packages(env)
 
@@ -52,7 +49,7 @@ class Resourcemanager(Script):
 
     if params.version and compare_versions(format_stack_version(params.version), '4.0.0.0') >= 0:
       conf_select.select(params.stack_name, "hadoop", params.version)
-      stack_select.select("hadoop-yarn-resourcemanager", params.version)
+      stack_select.select_packages(params.version)
       #Execute(format("stack-select set hadoop-yarn-resourcemanager {version}"))
 
   def start(self, env, upgrade_type=None):

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/YARN/package/scripts/yarn_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/YARN/package/scripts/yarn_client.py b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/YARN/package/scripts/yarn_client.py
index 0af5ccb..1f139bf 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/YARN/package/scripts/yarn_client.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/YARN/package/scripts/yarn_client.py
@@ -40,16 +40,13 @@ class YarnClient(Script):
   def status(self, env):
     raise ClientComponentHasNoStatus()
 
-  def get_component_name(self):
-    return "hadoop-client"
-
   def pre_upgrade_restart(self, env, upgrade_type=None):
     import params
     env.set_params(params)
 
     if params.version and compare_versions(format_stack_version(params.version), '4.0.0.0') >= 0:
       conf_select.select(params.stack_name, "hadoop", params.version)
-      stack_select.select("hadoop-client", params.version)
+      stack_select.select_packages(params.version)
       #Execute(format("stack-select set hadoop-client {version}"))
 
 if __name__ == "__main__":

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/ZOOKEEPER/package/scripts/zookeeper.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/ZOOKEEPER/package/scripts/zookeeper.py b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/ZOOKEEPER/package/scripts/zookeeper.py
index d861f6f..0cb62bf 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/ZOOKEEPER/package/scripts/zookeeper.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/ZOOKEEPER/package/scripts/zookeeper.py
@@ -74,7 +74,7 @@ def zookeeper(type = None, upgrade_type=None):
     # This path may be missing after Ambari upgrade. We need to create it.
     if (upgrade_type == "rolling") and (not os.path.exists("/usr/iop/current/zookeeper-server")) and params.current_version:
       conf_select(params.stack_name, "zookeeper", params.current_version)
-      stack_select.select("zookeeper-server", params.version)
+      stack_select.select_packages(params.version)
       #Execute(format("stack-select set zookeeper-server {version}"))
 
   if (params.log4j_props != None):

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/ZOOKEEPER/package/scripts/zookeeper_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/ZOOKEEPER/package/scripts/zookeeper_client.py b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/ZOOKEEPER/package/scripts/zookeeper_client.py
index 80ca08a..63e1df8 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/ZOOKEEPER/package/scripts/zookeeper_client.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/ZOOKEEPER/package/scripts/zookeeper_client.py
@@ -30,9 +30,6 @@ from zookeeper import zookeeper
 
 class ZookeeperClient(Script):
 
-  def get_component_name(self):
-    return "zookeeper-client"
-
   def install(self, env):
     self.install_packages(env)
     self.configure(env)
@@ -50,7 +47,7 @@ class ZookeeperClient(Script):
 
     if params.version and compare_versions(format_stack_version(params.version), '4.0.0.0') >= 0:
       conf_select.select(params.stack_name, "zookeeper", params.version)
-      stack_select.select("zookeeper-client", params.version)
+      stack_select.select_packages(params.version)
       #Execute(format("stack-select set zookeeper-client {version}"))
 
   def start(self, env, upgrade_type=None):

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/ZOOKEEPER/package/scripts/zookeeper_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/ZOOKEEPER/package/scripts/zookeeper_server.py b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/ZOOKEEPER/package/scripts/zookeeper_server.py
index ccba9ce..727622a 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/ZOOKEEPER/package/scripts/zookeeper_server.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/ZOOKEEPER/package/scripts/zookeeper_server.py
@@ -40,9 +40,6 @@ from zookeeper_service import zookeeper_service
 
 class ZookeeperServer(Script):
 
-  def get_component_name(self):
-    return "zookeeper-server"
-
   def install(self, env):
     self.install_packages(env)
     self.configure(env)
@@ -59,7 +56,7 @@ class ZookeeperServer(Script):
 
     if params.version and compare_versions(format_stack_version(params.version), '4.0.0.0') >= 0:
       conf_select.select(params.stack_name, "zookeeper", params.version)
-      stack_select.select("zookeeper-server", params.version)
+      stack_select.select_packages(params.version)
       #Execute(format("stack-select set zookeeper-server {version}"))
 
   def start(self, env, upgrade_type=None):

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/stacks/BigInsights/4.2.5/hooks/after-INSTALL/scripts/shared_initialization.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2.5/hooks/after-INSTALL/scripts/shared_initialization.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2.5/hooks/after-INSTALL/scripts/shared_initialization.py
index d306066..1324c8c 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2.5/hooks/after-INSTALL/scripts/shared_initialization.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.2.5/hooks/after-INSTALL/scripts/shared_initialization.py
@@ -43,10 +43,21 @@ def setup_stack_symlinks():
     # which would only be during an intial cluster installation
     version = params.current_version if params.current_version is not None else params.stack_version_unformatted
 
-    if not params.upgrade_suspended:
-      # On parallel command execution this should be executed by a single process at a time.
-      with FcntlBasedProcessLock(params.stack_select_lock_file, enabled = params.is_parallel_execution_enabled, skip_fcntl_failures = True):
-        stack_select.select_all(version)
+    if params.upgrade_suspended:
+      Logger.warning(
+        "Skipping stack-selector-tool because there is a suspended upgrade")
+      return
+
+    # get the packages which the stack-select tool should be used on
+    stack_select_packages = stack_select.get_packages(stack_select.PACKAGE_SCOPE_INSTALL)
+    if stack_select_packages is None:
+      return
+
+    # On parallel command execution this should be executed by a single process at a time.
+    with FcntlBasedProcessLock(params.stack_select_lock_file, enabled = params.is_parallel_execution_enabled, skip_fcntl_failures = True):
+      for package in stack_select_packages:
+        stack_select.select(package, version)
+
 
 def setup_config():
   import params
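
Condensed, the install-time flow introduced above is: bail out while an upgrade is suspended, ask stack_select for the packages in the INSTALL scope, then run the selector for each one under a host-wide lock. A sketch of that flow (get_packages, PACKAGE_SCOPE_INSTALL and the lock usage are taken from the hunk; the FcntlBasedProcessLock and params imports are provided by the original hook script):

from resource_management.core.logger import Logger
from resource_management.libraries.functions import stack_select
# FcntlBasedProcessLock and the params module are imported by the surrounding hook script.

def setup_stack_symlinks():
  import params

  if params.upgrade_suspended:
    Logger.warning("Skipping stack-selector-tool because there is a suspended upgrade")
    return

  version = params.current_version if params.current_version is not None else params.stack_version_unformatted

  # Packages the stack-select tool should manage at install time.
  stack_select_packages = stack_select.get_packages(stack_select.PACKAGE_SCOPE_INSTALL)
  if stack_select_packages is None:
    return

  # Only one process at a time may run the selector tool on this host.
  with FcntlBasedProcessLock(params.stack_select_lock_file,
                             enabled = params.is_parallel_execution_enabled,
                             skip_fcntl_failures = True):
    for package in stack_select_packages:
      stack_select.select(package, version)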

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/stacks/BigInsights/4.2.5/services/HBASE/package/scripts/hbase_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2.5/services/HBASE/package/scripts/hbase_client.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2.5/services/HBASE/package/scripts/hbase_client.py
index f18a96a..1c3f113 100644
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2.5/services/HBASE/package/scripts/hbase_client.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.2.5/services/HBASE/package/scripts/hbase_client.py
@@ -52,20 +52,16 @@ class HbaseClientWindows(HbaseClient):
 
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class HbaseClientDefault(HbaseClient):
-  def get_component_name(self):
-    return "hbase-client"
-
   def pre_upgrade_restart(self, env, upgrade_type=None):
     import params
     env.set_params(params)
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version): 
       conf_select.select(params.stack_name, "hbase", params.version)
-      stack_select.select("hbase-client", params.version)
 
       # phoenix may not always be deployed
       try:
-        stack_select.select("phoenix-client", params.version)
+        stack_select.select_packages(params.version)
       except Exception as e:
         print "Ignoring error due to missing phoenix-client"
         print str(e)
@@ -75,7 +71,7 @@ class HbaseClientDefault(HbaseClient):
       # of the final "CLIENTS" group and we need to ensure that hadoop-client
       # is also set
       conf_select.select(params.stack_name, "hadoop", params.version)
-      stack_select.select("hadoop-client", params.version)
+      stack_select.select_packages(params.version)
 
 
 if __name__ == "__main__":

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/stacks/BigInsights/4.2.5/services/HBASE/package/scripts/hbase_master.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2.5/services/HBASE/package/scripts/hbase_master.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2.5/services/HBASE/package/scripts/hbase_master.py
index ae0e0d9..461eab5 100644
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2.5/services/HBASE/package/scripts/hbase_master.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.2.5/services/HBASE/package/scripts/hbase_master.py
@@ -76,13 +76,10 @@ class HbaseMasterWindows(HbaseMaster):
 
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class HbaseMasterDefault(HbaseMaster):
-  def get_component_name(self):
-    return "hbase-master"
-
   def pre_upgrade_restart(self, env, upgrade_type=None):
     import params
     env.set_params(params)
-    upgrade.prestart(env, "hbase-master")
+    upgrade.prestart(env)
 
   def start(self, env, upgrade_type=None):
     import params

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/stacks/BigInsights/4.2.5/services/HBASE/package/scripts/hbase_regionserver.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2.5/services/HBASE/package/scripts/hbase_regionserver.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2.5/services/HBASE/package/scripts/hbase_regionserver.py
index 370167b..bd11fa0 100644
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2.5/services/HBASE/package/scripts/hbase_regionserver.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.2.5/services/HBASE/package/scripts/hbase_regionserver.py
@@ -70,13 +70,10 @@ class HbaseRegionServerWindows(HbaseRegionServer):
 
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class HbaseRegionServerDefault(HbaseRegionServer):
-  def get_component_name(self):
-    return "hbase-regionserver"
-
   def pre_upgrade_restart(self, env, upgrade_type=None):
     import params
     env.set_params(params)
-    upgrade.prestart(env, "hbase-regionserver")
+    upgrade.prestart(env)
 
   def post_upgrade_restart(self, env, upgrade_type=None):
     import params

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/stacks/BigInsights/4.2.5/services/HBASE/package/scripts/hbase_restgatewayserver.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2.5/services/HBASE/package/scripts/hbase_restgatewayserver.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2.5/services/HBASE/package/scripts/hbase_restgatewayserver.py
index 1d028d6..23900f4 100644
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2.5/services/HBASE/package/scripts/hbase_restgatewayserver.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.2.5/services/HBASE/package/scripts/hbase_restgatewayserver.py
@@ -27,9 +27,6 @@ from hbase_decommission import hbase_decommission
 import upgrade
 
 class HbaseRestGatewayServer(Script):
-  def get_component_name(self):
-    return "hbase-restserver"
-
   def install(self, env):
     self.install_packages(env)
     
@@ -42,7 +39,7 @@ class HbaseRestGatewayServer(Script):
   def pre_upgrade_restart(self, env, upgrade_type=None):
     import params
     env.set_params(params)
-    upgrade.prestart(env, "hbase-restserver")
+    upgrade.prestart(env)
 
   def post_upgrade_restart(self, env, upgrade_type=None):
     import params

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/stacks/BigInsights/4.2.5/services/HBASE/package/scripts/phoenix_queryserver.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2.5/services/HBASE/package/scripts/phoenix_queryserver.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2.5/services/HBASE/package/scripts/phoenix_queryserver.py
index 82113e9..1621916 100644
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2.5/services/HBASE/package/scripts/phoenix_queryserver.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.2.5/services/HBASE/package/scripts/phoenix_queryserver.py
@@ -34,10 +34,6 @@ class PhoenixQueryServer(Script):
     self.install_packages(env)
 
 
-  def get_component_name(self):
-    return "phoenix-server"
-
-
   def configure(self, env):
     import params
     env.set_params(params)
@@ -64,7 +60,7 @@ class PhoenixQueryServer(Script):
     if params.stack_version_formatted and check_stack_feature(StackFeature.PHOENIX, params.stack_version_formatted):     
       # phoenix uses hbase configs
       conf_select.select(params.stack_name, "hbase", params.version)
-      stack_select.select("phoenix-server", params.version)
+      stack_select.select_packages(params.version)
 
 
   def status(self, env):

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/stacks/BigInsights/4.2.5/services/HBASE/package/scripts/upgrade.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2.5/services/HBASE/package/scripts/upgrade.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2.5/services/HBASE/package/scripts/upgrade.py
index b1a19e6..dbeb55d 100644
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2.5/services/HBASE/package/scripts/upgrade.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.2.5/services/HBASE/package/scripts/upgrade.py
@@ -27,12 +27,12 @@ from resource_management.libraries.functions.stack_features import check_stack_f
 from resource_management.libraries.functions.decorator import retry
 from resource_management.libraries.functions import check_process_status
 
-def prestart(env, stack_component):
+def prestart(env):
   import params
 
   if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
     conf_select.select(params.stack_name, "hbase", params.version)
-    stack_select.select(stack_component, params.version)
+    stack_select.select_packages(params.version)
 
 def post_regionserver(env):
   import params
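
With the component argument gone from prestart(), every HBASE handler in this stack calls it the same way; the excerpt below matches hbase_master.py earlier in the patch (imports as in that file):

class HbaseMasterDefault(HbaseMaster):
  def pre_upgrade_restart(self, env, upgrade_type=None):
    import params
    env.set_params(params)
    upgrade.prestart(env)  # the package list now comes from stack metadata, not the caller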

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/FLUME/package/scripts/flume_handler.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/FLUME/package/scripts/flume_handler.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/FLUME/package/scripts/flume_handler.py
index 22f4b29..d70d06a 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/FLUME/package/scripts/flume_handler.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/FLUME/package/scripts/flume_handler.py
@@ -34,9 +34,6 @@ from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl
 
 class FlumeHandler(Script):
 
-  def get_component_name(self):
-    return "flume-server"
-
   def install(self, env):
     import params
     self.install_packages(env)
@@ -137,7 +134,7 @@ class FlumeHandler(Script):
 
     Logger.info("Executing Flume Stack Upgrade pre-restart")
     conf_select.select(params.stack_name, "flume", params.version)
-    stack_select.select("flume-server", params.version)
+    stack_select.select_packages(params.version)
     if params.upgrade_direction == Direction.UPGRADE:
       flume_upgrade.pre_start_restore()
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HBASE/package/scripts/hbase_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HBASE/package/scripts/hbase_client.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HBASE/package/scripts/hbase_client.py
index ba53462..2b83664 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HBASE/package/scripts/hbase_client.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HBASE/package/scripts/hbase_client.py
@@ -28,21 +28,17 @@ from hbase import hbase
          
 class HbaseClient(Script):
 
-  def get_component_name(self):
-    return "hbase-client"
-
   def pre_upgrade_restart(self, env, upgrade_type=None):
     import params
     env.set_params(params)
 
     if params.version and compare_versions(format_stack_version(params.version), '4.0.0.0') >= 0:
       conf_select.select(params.stack_name, "hbase", params.version)
-      stack_select.select("hbase-client", params.version)
       #Execute(format("iop-select set hbase-client {version}"))
       
       # phoenix may not always be deployed
       try:
-        stack_select.select("phoenix-client", params.version)
+        stack_select.select_packages(params.version)
       except Exception as e:
         print "Ignoring error due to missing phoenix-client"
         print str(e)
@@ -51,7 +47,6 @@ class HbaseClient(Script):
       # of the final "CLIENTS" group and we need to ensure that hadoop-client
       # is also set
       conf_select.select(params.stack_name, "hadoop", params.version)
-      stack_select.select("hadoop-client", params.version)
       #Execute(format("iop-select set hadoop-client {version}"))
 
   def install(self, env):

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HBASE/package/scripts/hbase_master.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HBASE/package/scripts/hbase_master.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HBASE/package/scripts/hbase_master.py
index ccca90c..23643e7 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HBASE/package/scripts/hbase_master.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HBASE/package/scripts/hbase_master.py
@@ -31,9 +31,6 @@ from setup_ranger_hbase import setup_ranger_hbase
          
 class HbaseMaster(Script):
 
-  def get_component_name(self):
-    return "hbase-master"
-
   def install(self, env):
     self.install_packages(env)
     
@@ -46,7 +43,7 @@ class HbaseMaster(Script):
   def pre_upgrade_restart(self, env, upgrade_type=None):
     import params
     env.set_params(params)
-    upgrade.prestart(env, "hbase-master")
+    upgrade.prestart(env)
 
   def start(self, env, upgrade_type=None):
     import params

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HBASE/package/scripts/hbase_regionserver.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HBASE/package/scripts/hbase_regionserver.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HBASE/package/scripts/hbase_regionserver.py
index d9bf76c..b92a29a 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HBASE/package/scripts/hbase_regionserver.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HBASE/package/scripts/hbase_regionserver.py
@@ -30,9 +30,6 @@ from setup_ranger_hbase import setup_ranger_hbase
          
 class HbaseRegionServer(Script):
 
-  def get_component_name(self):
-    return "hbase-regionserver"
-
   def install(self, env):
     self.install_packages(env)
     
@@ -45,7 +42,7 @@ class HbaseRegionServer(Script):
   def pre_upgrade_restart(self, env, upgrade_type=None):
     import params
     env.set_params(params)
-    upgrade.prestart(env, "hbase-regionserver")
+    upgrade.prestart(env)
 
   def post_upgrade_restart(self, env, upgrade_type=None):
     import params

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HBASE/package/scripts/hbase_restgatewayserver.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HBASE/package/scripts/hbase_restgatewayserver.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HBASE/package/scripts/hbase_restgatewayserver.py
index c7e0cc4..23900f4 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HBASE/package/scripts/hbase_restgatewayserver.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HBASE/package/scripts/hbase_restgatewayserver.py
@@ -27,9 +27,6 @@ from hbase_decommission import hbase_decommission
 import upgrade
 
 class HbaseRestGatewayServer(Script):
-  def get_component_name(self):
-    return "hbase-restserver"
-    
   def install(self, env):
     self.install_packages(env)
     
@@ -42,7 +39,7 @@ class HbaseRestGatewayServer(Script):
   def pre_upgrade_restart(self, env, upgrade_type=None):
     import params
     env.set_params(params)
-    upgrade.prestart(env, "hbase-restserver")
+    upgrade.prestart(env)
 
   def post_upgrade_restart(self, env, upgrade_type=None):
     import params

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HBASE/package/scripts/phoenix_queryserver.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HBASE/package/scripts/phoenix_queryserver.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HBASE/package/scripts/phoenix_queryserver.py
index 88d5f58..51b8c65 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HBASE/package/scripts/phoenix_queryserver.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HBASE/package/scripts/phoenix_queryserver.py
@@ -30,11 +30,6 @@ class PhoenixQueryServer(Script):
     import params
     self.install_packages(env)
 
-
-  def get_component_name(self):
-    return "phoenix-server"
-
-
   def configure(self, env):
     import params
     env.set_params(params)
@@ -61,7 +56,7 @@ class PhoenixQueryServer(Script):
     if Script.is_stack_greater_or_equal("4.2"):
       # phoenix uses hbase configs
       conf_select.select(params.stack_name, "hbase", params.version)
-      stack_select.select("phoenix-server", params.version)
+      stack_select.select_packages(params.version)
 
 
   def status(self, env):

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HBASE/package/scripts/upgrade.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HBASE/package/scripts/upgrade.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HBASE/package/scripts/upgrade.py
index 11b770d..f1024c5 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HBASE/package/scripts/upgrade.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HBASE/package/scripts/upgrade.py
@@ -26,12 +26,12 @@ from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.version import compare_versions, format_stack_version
 from resource_management.libraries.functions.decorator import retry
 
-def prestart(env, component):
+def prestart(env):
   import params
 
   if params.version and compare_versions(format_stack_version(params.version), '4.0.0.0') >= 0:
     conf_select.select(params.stack_name, "hbase", params.version)
-    stack_select.select(component, params.version)
+    stack_select.select_packages(params.version)
 
 def post_regionserver(env):
   import params

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HDFS/package/scripts/datanode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HDFS/package/scripts/datanode.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HDFS/package/scripts/datanode.py
index a58db09..7f38a78 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HDFS/package/scripts/datanode.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HDFS/package/scripts/datanode.py
@@ -34,7 +34,7 @@ class DataNode(Script):
     Get the name or path to the hdfs binary depending on the stack and version.
     """
     import params
-    stack_to_comp = self.get_component_name()
+    stack_to_comp = stack_select.get_package_name()
     if params.stack_name in stack_to_comp:
       return get_hdfs_binary(stack_to_comp[params.stack_name])
     return "hdfs"
@@ -74,16 +74,13 @@ class DataNode(Script):
     env.set_params(status_params)
     check_process_status(status_params.datanode_pid_file)
 
-  def get_component_name(self):
-    return "hadoop-hdfs-datanode"
-
   def pre_upgrade_restart(self, env, upgrade_type=None):
     Logger.info("Executing DataNode Stack Upgrade pre-restart")
     import params
     env.set_params(params)
     if params.version and compare_versions(format_stack_version(params.version), '4.0.0.0') >= 0:
       conf_select.select(params.stack_name, "hadoop", params.version)
-      stack_select.select("hadoop-hdfs-datanode", params.version)
+      stack_select.select_packages(params.version)
       #Execute(format("iop-select set hadoop-hdfs-datanode {version}"))
 
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HDFS/package/scripts/hdfs_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HDFS/package/scripts/hdfs_client.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HDFS/package/scripts/hdfs_client.py
index 17b64f0..62c6dd0 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HDFS/package/scripts/hdfs_client.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HDFS/package/scripts/hdfs_client.py
@@ -29,9 +29,6 @@ from utils import service
 
 class HdfsClient(Script):
 
-  def get_component_name(self):
-    return "hadoop-client"
-
   def install(self, env):
     import params
 
@@ -44,7 +41,7 @@ class HdfsClient(Script):
     env.set_params(params)
     if params.version and compare_versions(format_stack_version(params.version), '4.0.0.0') >= 0:
       conf_select.select(params.stack_name, "hadoop", params.version)
-      stack_select.select("hadoop-client", params.version)
+      stack_select.select_packages(params.version)
 
   def start(self, env, upgrade_type=None):
     import params

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HDFS/package/scripts/journalnode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HDFS/package/scripts/journalnode.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HDFS/package/scripts/journalnode.py
index 8a7f572..3f44de0 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HDFS/package/scripts/journalnode.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HDFS/package/scripts/journalnode.py
@@ -34,9 +34,6 @@ import journalnode_upgrade
 
 class JournalNode(Script):
 
-  def get_component_name(self):
-    return "hadoop-hdfs-journalnode"
-
   def install(self, env):
     import params
 
@@ -50,7 +47,7 @@ class JournalNode(Script):
 
     if params.version and compare_versions(format_stack_version(params.version), '4.0.0.0') >= 0:
       conf_select.select(params.stack_name, "hadoop", params.version)
-      stack_select.select("hadoop-hdfs-journalnode", params.version)
+      stack_select.select_packages(params.version)
       #Execute(format("iop-select set hadoop-hdfs-journalnode {version}"))
 
   def start(self, env, upgrade_type=None):

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HDFS/package/scripts/namenode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HDFS/package/scripts/namenode.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HDFS/package/scripts/namenode.py
index 4ff4a4b..280aeee 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HDFS/package/scripts/namenode.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HDFS/package/scripts/namenode.py
@@ -58,15 +58,12 @@ except ImportError:
 
 class NameNode(Script):
 
-  def get_component_name(self):
-    return "hadoop-hdfs-namenode"
-
   def get_hdfs_binary(self):
     """
     Get the name or path to the hdfs binary depending on the stack and version.
     """
     import params
-    stack_to_comp = self.get_component_name()
+    stack_to_comp = stack_select.get_package_name()
     if params.stack_name in stack_to_comp:
       return get_hdfs_binary(stack_to_comp[params.stack_name])
     return "hdfs"
@@ -100,7 +97,7 @@ class NameNode(Script):
 
     if params.version and compare_versions(format_stack_version(params.version), '4.0.0.0') >= 0:
       conf_select.select(params.stack_name, "hadoop", params.version)
-      stack_select.select("hadoop-hdfs-namenode", params.version)
+      stack_select.select_packages(params.version)
       #Execute(format("iop-select set hadoop-hdfs-namenode {version}"))
 
   def start(self, env, upgrade_type=None):

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HDFS/package/scripts/nfsgateway.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HDFS/package/scripts/nfsgateway.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HDFS/package/scripts/nfsgateway.py
index 9e7bfb1..b105d69 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HDFS/package/scripts/nfsgateway.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HDFS/package/scripts/nfsgateway.py
@@ -30,9 +30,6 @@ from resource_management.libraries.functions.version import compare_versions, fo
 
 class NFSGateway(Script):
 
-  def get_component_name(self):
-    return "hadoop-hdfs-nfs3"
-
   def install(self, env):
     import params
 
@@ -46,7 +43,7 @@ class NFSGateway(Script):
 
     if Script.is_stack_greater_or_equal('4.1.0.0'):
       conf_select.select(params.stack_name, "hadoop", params.version)
-      stack_select.select("hadoop-hdfs-nfs3", params.version)
+      stack_select.select_packages(params.version)
 
   def start(self, env, upgrade_type=None):
     import params

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HDFS/package/scripts/snamenode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HDFS/package/scripts/snamenode.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HDFS/package/scripts/snamenode.py
index 6bd73cc..6a7fd43 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HDFS/package/scripts/snamenode.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HDFS/package/scripts/snamenode.py
@@ -31,9 +31,6 @@ from hdfs import hdfs
 
 class SNameNode(Script):
 
-  def get_component_name(self):
-    return "hadoop-hdfs-secondarynamenode"
-
   def install(self, env):
     import params
 
@@ -48,7 +45,7 @@ class SNameNode(Script):
 
     if params.version and compare_versions(format_stack_version(params.version), '4.0.0.0') >= 0:
       conf_select.select(params.stack_name, "hadoop", params.version)
-      stack_select.select("hadoop-hdfs-secondarynamenode", params.version)
+      stack_select.select_packages(params.version)
 
   def start(self, env, upgrade_type=None):
     import params

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HIVE/package/scripts/hcat_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HIVE/package/scripts/hcat_client.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HIVE/package/scripts/hcat_client.py
index e914cf6..8ae64b9 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HIVE/package/scripts/hcat_client.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HIVE/package/scripts/hcat_client.py
@@ -42,9 +42,7 @@ class HCatClient(Script):
 
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class HCatClientDefault(HCatClient):
-  def get_component_name(self):
-    return "hadoop-client"
-
+  pass
 
 if __name__ == "__main__":
   HCatClient().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HIVE/package/scripts/hive_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HIVE/package/scripts/hive_client.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HIVE/package/scripts/hive_client.py
index c5d821b..14a5f83 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HIVE/package/scripts/hive_client.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HIVE/package/scripts/hive_client.py
@@ -42,9 +42,6 @@ class HiveClient(Script):
   
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class HiveClientDefault(HiveClient):        
-  def get_component_name(self):
-    return "hadoop-client"
-
   def pre_upgrade_restart(self, env, upgrade_type=None):
     Logger.info("Executing Hive client Stack Upgrade pre-restart")
 
@@ -54,7 +51,7 @@ class HiveClientDefault(HiveClient):
     if params.version and compare_versions(format_stack_version(params.version), '4.1.0.0') >= 0:
       conf_select.select(params.stack_name, "hive", params.version)
       conf_select.select(params.stack_name, "hadoop", params.version)
-      stack_select.select("hadoop-client", params.version)
+      stack_select.select_packages(params.version)
 
   def pre_upgrade_restart(self, env, upgrade_type=None):
     """
@@ -77,7 +74,7 @@ class HiveClientDefault(HiveClient):
     # HCat client doesn't have a first-class entry in hdp-select. Since clients always
     # update after daemons, this ensures that the hcat directories are correct on hosts
     # which do not include the WebHCat daemon
-    stack_select.select("hive-webhcat", params.version)
+    stack_select.select_packages(params.version)
 
 if __name__ == "__main__":
   HiveClient().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HIVE/package/scripts/hive_metastore.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HIVE/package/scripts/hive_metastore.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HIVE/package/scripts/hive_metastore.py
index 268db10..0a651f9 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HIVE/package/scripts/hive_metastore.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HIVE/package/scripts/hive_metastore.py
@@ -73,9 +73,6 @@ class HiveMetastore(Script):
 
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class HiveMetastoreDefault(HiveMetastore):
-  def get_component_name(self):
-    return "hive-metastore"
-
   def status(self, env):
     import status_params
     from resource_management.libraries.functions import check_process_status
@@ -100,7 +97,7 @@ class HiveMetastoreDefault(HiveMetastore):
       
     if params.version and compare_versions(format_stack_version(params.version), '4.0.0.0') >= 0:
       conf_select.select(params.stack_name, "hive", params.version)
-      stack_select.select("hive-metastore", params.version)
+      stack_select.select_packages(params.version)
 
 
   def security_status(self, env):

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HIVE/package/scripts/hive_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HIVE/package/scripts/hive_server.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HIVE/package/scripts/hive_server.py
index eb2be94..79e4c73 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HIVE/package/scripts/hive_server.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HIVE/package/scripts/hive_server.py
@@ -56,9 +56,6 @@ class HiveServer(Script):
 
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class HiveServerDefault(HiveServer):
-  def get_component_name(self):
-    return "hive-server2"
-
   def start(self, env, upgrade_type=None):
     import params
     env.set_params(params)
@@ -99,7 +96,7 @@ class HiveServerDefault(HiveServer):
 
     if params.version and compare_versions(format_stack_version(params.version), '4.0.0.0') >= 0:
       conf_select.select(params.stack_name, "hive", params.version)
-      stack_select.select("hive-server2", params.version)
+      stack_select.select_packages(params.version)
       
       # Copy mapreduce.tar.gz and tez.tar.gz to HDFS
       resource_created = copy_to_hdfs(

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HIVE/package/scripts/webhcat_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HIVE/package/scripts/webhcat_server.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HIVE/package/scripts/webhcat_server.py
index 72772c9..9f1596b 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HIVE/package/scripts/webhcat_server.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HIVE/package/scripts/webhcat_server.py
@@ -54,9 +54,6 @@ class WebHCatServer(Script):
 
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class WebHCatServerDefault(WebHCatServer):
-  def get_component_name(self):
-    return "hive-webhcat"
-
   def status(self, env):
     import status_params
     env.set_params(status_params)
@@ -71,7 +68,7 @@ class WebHCatServerDefault(WebHCatServer):
       # webhcat has no conf, but uses hadoop home, so verify that regular hadoop conf is set
       conf_select.select(params.stack_name, "hive-hcatalog", params.version)
       conf_select.select(params.stack_name, "hadoop", params.version)
-      stack_select.select("hive-webhcat", params.version)
+      stack_select.select_packages(params.version)
 
   def security_status(self, env):
     import status_params

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/KAFKA/package/scripts/kafka_broker.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/KAFKA/package/scripts/kafka_broker.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/KAFKA/package/scripts/kafka_broker.py
index a21acd1..cf08b9b 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/KAFKA/package/scripts/kafka_broker.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/KAFKA/package/scripts/kafka_broker.py
@@ -33,9 +33,6 @@ from kafka import kafka
 
 class KafkaBroker(Script):
 
-  def get_component_name(self):
-    return "kafka-broker"
-
   def install(self, env):
     self.install_packages(env)
 
@@ -49,7 +46,7 @@ class KafkaBroker(Script):
     env.set_params(params)
 
     if params.version and compare_versions(format_stack_version(params.version), '4.1.0.0') >= 0:
-      stack_select.select("kafka-broker", params.version)
+      stack_select.select_packages(params.version)
 
     if params.version and compare_versions(format_stack_version(params.version), '4.1.0.0') >= 0:
       conf_select.select(params.stack_name, "kafka", params.version)

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/KNOX/package/scripts/knox_gateway.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/KNOX/package/scripts/knox_gateway.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/KNOX/package/scripts/knox_gateway.py
index d5821e8..2b3a103 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/KNOX/package/scripts/knox_gateway.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/KNOX/package/scripts/knox_gateway.py
@@ -44,11 +44,6 @@ from knox_ldap import ldap
 from setup_ranger_knox import setup_ranger_knox
 
 class KnoxGateway(Script):
-
-
-  def get_component_name(self):
-    return "knox-server"
-
   def install(self, env):
     self.install_packages(env)
     import params
@@ -118,7 +113,7 @@ class KnoxGateway(Script):
       # conf-select will change the symlink to the conf folder.
       conf_select.select(params.stack_name, "knox", params.version)
 #     hdp_select.select("knox-server", params.version)
-      stack_select.select("knox-server", params.version)
+      stack_select.select_packages(params.version)
 
       # Extract the tar of the old conf folder into the new conf directory
       if absolute_backup_dir is not None and params.upgrade_direction and params.upgrade_direction == Direction.UPGRADE:

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/OOZIE/package/scripts/oozie_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/OOZIE/package/scripts/oozie_client.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/OOZIE/package/scripts/oozie_client.py
index de30091..8f4577e 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/OOZIE/package/scripts/oozie_client.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/OOZIE/package/scripts/oozie_client.py
@@ -29,9 +29,6 @@ from oozie_service import oozie_service
 
 class OozieClient(Script):
 
-  def get_component_name(self):
-    return "oozie-client"
-
   def install(self, env):
     self.install_packages(env)
     self.configure(env)
@@ -58,7 +55,7 @@ class OozieClient(Script):
 
     Logger.info("Executing Oozie Client Rolling Upgrade pre-restart")
     conf_select.select(params.stack_name, "oozie", params.version)
-    stack_select.select("oozie-client", params.version)
+    stack_select.select_packages(params.version)
     #Execute(format("iop-select set oozie-client {version}"))
 
   # We substitute some configs (oozie.authentication.kerberos.principal) before generation (see oozie.py and params.py).

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/OOZIE/package/scripts/oozie_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/OOZIE/package/scripts/oozie_server.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/OOZIE/package/scripts/oozie_server.py
index 52e0de1..c2e1af5 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/OOZIE/package/scripts/oozie_server.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/OOZIE/package/scripts/oozie_server.py
@@ -42,9 +42,6 @@ from oozie_service import oozie_service
          
 class OozieServer(Script):
 
-  def get_component_name(self):
-    return "oozie-server"
-
   def install(self, env):
     self.install_packages(env)
 
@@ -61,10 +58,9 @@ class OozieServer(Script):
       # This is required as both need to be pointing to new installed oozie version.
 
       # Sets the symlink : eg: /usr/hdp/current/oozie-client -> /usr/hdp/2.3.x.y-<version>/oozie
-      stack_select.select("oozie-client", params.version)
       # Sets the symlink : eg: /usr/hdp/current/oozie-server -> /usr/hdp/2.3.x.y-<version>/oozie
-      stack_select.select("oozie-server", params.version)
-    
+      stack_select.select_packages(params.version)
+
     env.set_params(params)
     oozie(is_server=True)
 
@@ -181,7 +177,7 @@ class OozieServer(Script):
     oozie_server_upgrade.backup_configuration()
 
     conf_select.select(params.stack_name, "oozie", params.version)
-    stack_select.select("oozie-server", params.version)
+    stack_select.select_packages(params.version)
     #Execute(format("iop-select set oozie-server {version}"))
 
     oozie_server_upgrade.restore_configuration()

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/PIG/package/scripts/pig_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/PIG/package/scripts/pig_client.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/PIG/package/scripts/pig_client.py
index bd08b56..96a48c1 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/PIG/package/scripts/pig_client.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/PIG/package/scripts/pig_client.py
@@ -39,9 +39,6 @@ class PigClient(Script):
 
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class PigClientLinux(PigClient):
-  def get_component_name(self):
-    return "hadoop-client"
-
   def pre_upgrade_restart(self, env, upgrade_type=None):
     import params
     env.set_params(params)
@@ -49,7 +46,7 @@ class PigClientLinux(PigClient):
     if params.version and compare_versions(format_stack_version(params.version), '4.0.0.0') >= 0:
       conf_select.select(params.stack_name, "pig", params.version)
       conf_select.select(params.stack_name, "hadoop", params.version)
-      stack_select.select("hadoop-client", params.version) # includes pig-client
+      stack_select.select_packages(params.version)
 
   def install(self, env):
     self.install_packages(env)

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/RANGER/package/scripts/ranger_admin.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/RANGER/package/scripts/ranger_admin.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/RANGER/package/scripts/ranger_admin.py
index b2d1c74..c34951b 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/RANGER/package/scripts/ranger_admin.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/RANGER/package/scripts/ranger_admin.py
@@ -35,9 +35,6 @@ class RangerAdmin(Script):
 
   upgrade_marker_file = '/tmp/rangeradmin_ru.inprogress'
 
-  def get_component_name(self):
-    return "ranger-admin"
-
   def install(self, env):
     self.install_packages(env)
     import params
@@ -178,7 +175,7 @@ class RangerAdmin(Script):
     stack_name = upgrade_stack[0]
     stack_version = upgrade_stack[1]
 
-    stack_select.select("ranger-admin", stack_version)
+    stack_select.select_packages(params.version)
     conf_select.select(stack_name, "ranger-admin", stack_version)
 
   def get_log_folder(self):

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/RANGER/package/scripts/ranger_usersync.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/RANGER/package/scripts/ranger_usersync.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/RANGER/package/scripts/ranger_usersync.py
index 7ccf12d..10eec12 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/RANGER/package/scripts/ranger_usersync.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/RANGER/package/scripts/ranger_usersync.py
@@ -74,9 +74,5 @@ class RangerUsersync(Script):
       from setup_ranger_xml import ranger
       ranger('ranger_usersync', upgrade_type=upgrade_type)
 
-  def get_component_name(self):
-    return "ranger-usersync"
-
-
 if __name__ == "__main__":
   RangerUsersync().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/RANGER/package/scripts/upgrade.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/RANGER/package/scripts/upgrade.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/RANGER/package/scripts/upgrade.py
index 098b5cf..75b1fbe 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/RANGER/package/scripts/upgrade.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/RANGER/package/scripts/upgrade.py
@@ -27,4 +27,4 @@ def prestart(env, iop_component):
   import params
 
   conf_select.select(params.stack_name, iop_component, params.version)
-  stack_select.select(iop_component, params.version)
+  stack_select.select_packages(params.version)

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/RANGER_KMS/package/scripts/kms_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/RANGER_KMS/package/scripts/kms_server.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/RANGER_KMS/package/scripts/kms_server.py
index f05124b..0239350 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/RANGER_KMS/package/scripts/kms_server.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/RANGER_KMS/package/scripts/kms_server.py
@@ -30,9 +30,6 @@ import upgrade
 
 class KmsServer(Script):
 
-  def get_component_name(self):
-    return "ranger-kms"
-
   def install(self, env):
     self.install_packages(env)
     import params

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/RANGER_KMS/package/scripts/upgrade.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/RANGER_KMS/package/scripts/upgrade.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/RANGER_KMS/package/scripts/upgrade.py
index 3aed713..b37c295 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/RANGER_KMS/package/scripts/upgrade.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/RANGER_KMS/package/scripts/upgrade.py
@@ -26,4 +26,4 @@ def prestart(env, iop_component):
   import params
 
   conf_select.select(params.stack_name, iop_component, params.version)
-  stack_select.select(iop_component, params.version)
+  stack_select.select_packages(params.version)

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SLIDER/package/scripts/slider_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SLIDER/package/scripts/slider_client.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SLIDER/package/scripts/slider_client.py
index 92c67ff..33e2588 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SLIDER/package/scripts/slider_client.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SLIDER/package/scripts/slider_client.py
@@ -31,23 +31,19 @@ class SliderClient(Script):
 
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class SliderClientLinux(SliderClient):
-  def get_component_name(self):
-    return "slider-client"
-
   def pre_upgrade_restart(self, env, upgrade_type=None):
     import params
     env.set_params(params)
 
     if params.version and compare_versions(format_stack_version(params.version), '4.0.0.0') >= 0:
       conf_select.select(params.stack_name, "slider", params.version)
-      stack_select.select("slider-client", params.version)
+      stack_select.select_packages(params.version)
       #Execute(format("iop-select set slider-client {version}"))
 
       # also set all of the hadoop clients since slider client is upgraded as
       # part of the final "CLIENTS" group and we need to ensure that
       # hadoop-client is also set
       conf_select.select(params.stack_name, "hadoop", params.version)
-      stack_select.select("hadoop-client", params.version)
       #Execute(format("iop-select set hadoop-client {version}"))
 
   def install(self, env):

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SOLR/package/scripts/solr_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SOLR/package/scripts/solr_server.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SOLR/package/scripts/solr_server.py
index 07b25a4..6b4454e 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SOLR/package/scripts/solr_server.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SOLR/package/scripts/solr_server.py
@@ -39,7 +39,7 @@ class SolrServer(Script):
     import params
     env.set_params(params)
     if params.version and compare_versions(format_stack_version(params.version), '4.1.0.0') >= 0:
-      stack_select.select("solr-server", params.version)
+      stack_select.select_packages(params.version)
       conf_select.select(params.stack_name, "solr", params.version)
 
   def start(self, env, upgrade_type=None):

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/package/scripts/job_history_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/package/scripts/job_history_server.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/package/scripts/job_history_server.py
index 2038369..50fef1a 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/package/scripts/job_history_server.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/package/scripts/job_history_server.py
@@ -32,16 +32,13 @@ from spark import *
 
 class JobHistoryServer(Script):
 
-  def get_component_name(self):
-     return "spark-historyserver"
-
   def pre_upgrade_restart(self, env, upgrade_type=None):
     import params
 
     env.set_params(params)
     if params.version and compare_versions(format_stack_version(params.version), '4.0.0.0') >= 0:
       conf_select.select(params.stack_name, "spark", params.version)
-      stack_select.select("spark-historyserver", params.version)
+      stack_select.select_packages(params.version)
       #Execute(format("iop-select set spark-historyserver {version}"))
 
   def install(self, env):

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/package/scripts/spark_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/package/scripts/spark_client.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/package/scripts/spark_client.py
index 9ec8525..afd9238 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/package/scripts/spark_client.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/package/scripts/spark_client.py
@@ -31,16 +31,13 @@ from spark import spark
 
 class SparkClient(Script):
 
-  def get_component_name(self):
-    return "spark-client"
-
   def pre_upgrade_restart(self, env, upgrade_type=None):
     import params
 
     env.set_params(params)
     if params.version and compare_versions(format_stack_version(params.version), '4.0.0.0') >= 0:
       conf_select.select(params.stack_name, "spark", params.version)
-      stack_select.select("spark-client", params.version)
+      stack_select.select_packages(params.version)
       #Execute(format("iop-select set spark-client {version}"))
 
   def install(self, env):

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/package/scripts/spark_thrift_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/package/scripts/spark_thrift_server.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/package/scripts/spark_thrift_server.py
index a0226b5..a680701 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/package/scripts/spark_thrift_server.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/package/scripts/spark_thrift_server.py
@@ -33,17 +33,13 @@ from spark import *
 
 class ThriftServer(Script):
 
-  def get_component_name(self):
-    return "spark-thriftserver"
-
-
   def pre_upgrade_restart(self, env, upgrade_type=None):
     import params
 
     env.set_params(params)
     if params.version and compare_versions(format_stack_version(params.version), '4.0.0.0') >= 0:
       conf_select.select(params.stack_name, "spark", params.version)
-      stack_select.select("spark-thriftserver", params.version)
+      stack_select.select_packages(params.version)
 
   def install(self, env):
     self.install_packages(env)

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SQOOP/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SQOOP/package/scripts/service_check.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SQOOP/package/scripts/service_check.py
index b1f658d..211c917 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SQOOP/package/scripts/service_check.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SQOOP/package/scripts/service_check.py
@@ -24,9 +24,6 @@ from resource_management import *
 
 class SqoopServiceCheck(Script):
 
-  def get_component_name(self):
-    return "sqoop-server"
-
   def service_check(self, env):
     import params
     env.set_params(params)

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SQOOP/package/scripts/sqoop_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SQOOP/package/scripts/sqoop_client.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SQOOP/package/scripts/sqoop_client.py
index 4dec354..7b48dfe 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SQOOP/package/scripts/sqoop_client.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SQOOP/package/scripts/sqoop_client.py
@@ -29,16 +29,13 @@ from sqoop import sqoop
 
 class SqoopClient(Script):
 
-  def get_component_name(self):
-    return "sqoop-client"
-
   def pre_rolling_restart(self, env):
     import params
     env.set_params(params)
 
     if params.version and compare_versions(format_stack_version(params.version), '4.0.0.0') >= 0:
       conf_select.select(params.stack_name, "sqoop", params.version)
-      stack_select.select("sqoop-client", params.version)
+      stack_select.select_packages(params.version)
       #Execute(format("iop-select set sqoop-client {version}"))
 
   def install(self, env):

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SYSTEMML/package/scripts/systemml_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SYSTEMML/package/scripts/systemml_client.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SYSTEMML/package/scripts/systemml_client.py
index 2d45b68..8ae1447 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SYSTEMML/package/scripts/systemml_client.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SYSTEMML/package/scripts/systemml_client.py
@@ -26,16 +26,13 @@ from resource_management.libraries.functions import stack_select
 
 class SystemMLClient(Script):
 
-  def get_component_name(self):
-    return "systemml-client"
-
   def pre_upgrade_restart(self, env, upgrade_type=None):
     import params
 
     env.set_params(params)
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
       #conf_select.select(params.stack_name, "systemml", params.version)
-      stack_select.select("systemml-client", params.version)
+      stack_select.select_packages(params.version)
 
   def install(self, env):
     self.install_packages(env)

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/TITAN/package/scripts/titan_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/TITAN/package/scripts/titan_client.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/TITAN/package/scripts/titan_client.py
index d54ccee..814fb38 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/TITAN/package/scripts/titan_client.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/TITAN/package/scripts/titan_client.py
@@ -39,16 +39,13 @@ class TitanClient(Script):
 
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class TitanClientLinux(TitanClient):
-    def get_component_name(self):
-        return "titan-client"
-
     def pre_rolling_restart(self, env):
         import params
         env.set_params(params)
 
         if params.version and compare_versions(format_stack_version(params.version), '4.2.0.0') >= 0:
             conf_select.select(params.stack_name, "titan", params.version)
-            stack_select.select("titan-client", params.version)
+            stack_select.select_packages(params.version)
 
     def install(self, env):
         self.install_packages(env)

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/YARN/package/scripts/application_timeline_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/YARN/package/scripts/application_timeline_server.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/YARN/package/scripts/application_timeline_server.py
index d2c7b66..fbac488 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/YARN/package/scripts/application_timeline_server.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/YARN/package/scripts/application_timeline_server.py
@@ -33,9 +33,6 @@ from service import service
 
 class ApplicationTimelineServer(Script):
 
-  def get_component_name(self):
-    return "hadoop-yarn-timelineserver"
-
   def install(self, env):
     self.install_packages(env)
     #self.configure(env)
@@ -52,7 +49,7 @@ class ApplicationTimelineServer(Script):
 
     if params.version and compare_versions(format_stack_version(params.version), '4.0.0.0') >= 0:
       conf_select.select(params.stack_name, "hadoop", params.version)
-      stack_select.select("hadoop-yarn-timelineserver", params.version)
+      stack_select.select_packages(params.version)
       #Execute(format("iop-select set hadoop-yarn-timelineserver {version}"))
 
   def start(self, env, upgrade_type=None):

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/YARN/package/scripts/historyserver.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/YARN/package/scripts/historyserver.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/YARN/package/scripts/historyserver.py
index f97bdf3..5e1cfea 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/YARN/package/scripts/historyserver.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/YARN/package/scripts/historyserver.py
@@ -40,9 +40,6 @@ from ambari_commons.os_family_impl import OsFamilyImpl
 
 class HistoryServer(Script):
 
-  def get_component_name(self):
-    return "hadoop-mapreduce-historyserver"
-
   def install(self, env):
     self.install_packages(env)
 
@@ -58,7 +55,7 @@ class HistoryServer(Script):
 
     if params.version and compare_versions(format_stack_version(params.version), '4.0.0.0') >= 0:
       conf_select.select(params.stack_name, "hadoop", params.version)
-      stack_select.select("hadoop-mapreduce-historyserver", params.version)
+      stack_select.select_packages(params.version)
       #Execute(format("iop-select set hadoop-mapreduce-historyserver {version}"))
       #copy_tarballs_to_hdfs('mapreduce', 'hadoop-mapreduce-historyserver', params.mapred_user, params.hdfs_user, params.user_group)
       # MC Hammer said, "Can't touch this"


http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/livy_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/livy_server.py b/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/livy_server.py
index 4ca9304..0ac1679 100644
--- a/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/livy_server.py
+++ b/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/livy_server.py
@@ -122,9 +122,6 @@ class LivyServer(Script):
       else:
         Logger.info("DFS directory '" + dir_path + "' exists.")
 
-  def get_component_name(self):
-    return "livy-server"
-
   def pre_upgrade_restart(self, env, upgrade_type=None):
     import params
 
@@ -132,7 +129,7 @@ class LivyServer(Script):
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
       Logger.info("Executing Livy Server Stack Upgrade pre-restart")
       conf_select.select(params.stack_name, "spark", params.version)
-      stack_select.select("livy-server", params.version)
+      stack_select.select_packages(params.version)
 
   def get_log_folder(self):
     import params

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/spark_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/spark_client.py b/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/spark_client.py
index 31bf4c6..a2e26b8 100644
--- a/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/spark_client.py
+++ b/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/spark_client.py
@@ -51,9 +51,6 @@ class SparkClient(Script):
   def status(self, env):
     raise ClientComponentHasNoStatus()
   
-  def get_component_name(self):
-    return "spark-client"
-
   def stack_upgrade_save_new_config(self, env):
     """
     Because this gets called during a Rolling Upgrade, the new configs have already been saved, so we must be
@@ -82,7 +79,7 @@ class SparkClient(Script):
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
       Logger.info("Executing Spark Client Stack Upgrade pre-restart")
       conf_select.select(params.stack_name, "spark", params.version)
-      stack_select.select("spark-client", params.version)
+      stack_select.select_packages(params.version)
 
 if __name__ == "__main__":
   SparkClient().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/spark_thrift_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/spark_thrift_server.py b/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/spark_thrift_server.py
index 0c82e6f..44a156f 100644
--- a/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/spark_thrift_server.py
+++ b/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/spark_thrift_server.py
@@ -63,9 +63,6 @@ class SparkThriftServer(Script):
     env.set_params(status_params)
     check_process_status(status_params.spark_thrift_server_pid_file)
 
-  def get_component_name(self):
-    return "spark-thriftserver"
-
   def pre_upgrade_restart(self, env, upgrade_type=None):
     import params
 
@@ -73,7 +70,7 @@ class SparkThriftServer(Script):
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
       Logger.info("Executing Spark Thrift Server Stack Upgrade pre-restart")
       conf_select.select(params.stack_name, "spark", params.version)
-      stack_select.select("spark-thriftserver", params.version)
+      stack_select.select_packages(params.version)
 
   def get_log_folder(self):
     import params

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/job_history_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/job_history_server.py b/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/job_history_server.py
index d2b32ae..793ffa3 100755
--- a/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/job_history_server.py
+++ b/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/job_history_server.py
@@ -68,9 +68,6 @@ class JobHistoryServer(Script):
     check_process_status(status_params.spark_history_server_pid_file)
     
 
-  def get_component_name(self):
-    return "spark2-historyserver"
-
   def pre_upgrade_restart(self, env, upgrade_type=None):
     import params
 
@@ -78,7 +75,7 @@ class JobHistoryServer(Script):
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
       Logger.info("Executing Spark2 Job History Server Stack Upgrade pre-restart")
       conf_select.select(params.stack_name, "spark2", params.version)
-      stack_select.select("spark2-historyserver", params.version)
+      stack_select.select_packages(params.version)
 
       # Spark 1.3.1.2.3, and higher, which was included in HDP 2.3, does not have a dependency on Tez, so it does not
       # need to copy the tarball, otherwise, copy it.

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/livy2_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/livy2_server.py b/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/livy2_server.py
index cb4f5ee..79022b8 100644
--- a/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/livy2_server.py
+++ b/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/livy2_server.py
@@ -125,9 +125,6 @@ class LivyServer(Script):
       else:
         Logger.info("DFS directory '" + dir_path + "' exists.")
 
-  def get_component_name(self):
-    return "livy2-server"
-
   def pre_upgrade_restart(self, env, upgrade_type=None):
     import params
 
@@ -135,7 +132,7 @@ class LivyServer(Script):
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
       Logger.info("Executing Livy2 Server Stack Upgrade pre-restart")
       conf_select.select(params.stack_name, "spark2", params.version)
-      stack_select.select("livy2-server", params.version)
+      stack_select.select_packages(params.version)
 
   def get_log_folder(self):
     import params

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/spark_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/spark_client.py b/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/spark_client.py
index 0a43750..1db4429 100755
--- a/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/spark_client.py
+++ b/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/spark_client.py
@@ -44,9 +44,6 @@ class SparkClient(Script):
   def status(self, env):
     raise ClientComponentHasNoStatus()
   
-  def get_component_name(self):
-    return "spark2-client"
-
   def pre_upgrade_restart(self, env, upgrade_type=None):
     import params
 
@@ -54,7 +51,7 @@ class SparkClient(Script):
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
       Logger.info("Executing Spark2 Client Stack Upgrade pre-restart")
       conf_select.select(params.stack_name, "spark", params.version)
-      stack_select.select("spark2-client", params.version)
+      stack_select.select_packages(params.version)
 
 if __name__ == "__main__":
   SparkClient().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/spark_thrift_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/spark_thrift_server.py b/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/spark_thrift_server.py
index 6fdd324..e0dc11d 100755
--- a/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/spark_thrift_server.py
+++ b/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/spark_thrift_server.py
@@ -63,16 +63,13 @@ class SparkThriftServer(Script):
     env.set_params(status_params)
     check_process_status(status_params.spark_thrift_server_pid_file)
 
-  def get_component_name(self):
-    return "spark2-thriftserver"
-
   def pre_upgrade_restart(self, env, upgrade_type=None):
     import params
 
     env.set_params(params)
     Logger.info("Executing Spark2 Thrift Server Stack Upgrade pre-restart")
     conf_select.select(params.stack_name, "spark2", params.version)
-    stack_select.select("spark2-thriftserver", params.version)
+    stack_select.select_packages(params.version)
       
   def get_log_folder(self):
     import params

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/SQOOP/1.4.4.2.0/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SQOOP/1.4.4.2.0/package/scripts/service_check.py b/ambari-server/src/main/resources/common-services/SQOOP/1.4.4.2.0/package/scripts/service_check.py
index bb503f5..5b48ffd 100644
--- a/ambari-server/src/main/resources/common-services/SQOOP/1.4.4.2.0/package/scripts/service_check.py
+++ b/ambari-server/src/main/resources/common-services/SQOOP/1.4.4.2.0/package/scripts/service_check.py
@@ -33,9 +33,6 @@ class SqoopServiceCheck(Script):
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class SqoopServiceCheckDefault(SqoopServiceCheck):
 
-  def get_component_name(self):
-    return "sqoop-server"
-
   def service_check(self, env):
     import params
     env.set_params(params)

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/SQOOP/1.4.4.2.0/package/scripts/sqoop_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SQOOP/1.4.4.2.0/package/scripts/sqoop_client.py b/ambari-server/src/main/resources/common-services/SQOOP/1.4.4.2.0/package/scripts/sqoop_client.py
index d420fab..8c7dd35 100644
--- a/ambari-server/src/main/resources/common-services/SQOOP/1.4.4.2.0/package/scripts/sqoop_client.py
+++ b/ambari-server/src/main/resources/common-services/SQOOP/1.4.4.2.0/package/scripts/sqoop_client.py
@@ -46,16 +46,13 @@ class SqoopClient(Script):
 
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class SqoopClientDefault(SqoopClient):
-  def get_component_name(self):
-    return "sqoop-client"
-
   def pre_upgrade_restart(self, env, upgrade_type=None):
     import params
     env.set_params(params)
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version): 
       conf_select.select(params.stack_name, "sqoop", params.version)
-      stack_select.select("sqoop-client", params.version)
+      stack_select.select_packages(params.version)
 
 
 @OsFamilyImpl(os_family=OSConst.WINSRV_FAMILY)

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/drpc_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/drpc_server.py b/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/drpc_server.py
index d0af415..7da3fa1 100644
--- a/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/drpc_server.py
+++ b/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/drpc_server.py
@@ -36,9 +36,6 @@ from resource_management.libraries.functions.security_commons import build_expec
 
 class DrpcServer(Script):
 
-  def get_component_name(self):
-    return "storm-client"
-
   def install(self, env):
     self.install_packages(env)
     self.configure(env)
@@ -55,7 +52,7 @@ class DrpcServer(Script):
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
       conf_select.select(params.stack_name, "storm", params.version)
-      stack_select.select("storm-client", params.version)
+      stack_select.select_packages(params.version)
 
   def start(self, env, upgrade_type=None):
     import params

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/nimbus.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/nimbus.py b/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/nimbus.py
index ab6dc3e..53b965e 100644
--- a/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/nimbus.py
+++ b/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/nimbus.py
@@ -38,9 +38,6 @@ from ambari_commons.os_family_impl import OsFamilyImpl
 from resource_management.core.resources.service import Service
 
 class Nimbus(Script):
-  def get_component_name(self):
-    return "storm-nimbus"
-
   def install(self, env):
     self.install_packages(env)
     self.configure(env)
@@ -59,9 +56,7 @@ class NimbusDefault(Nimbus):
     env.set_params(params)
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
       conf_select.select(params.stack_name, "storm", params.version)
-      stack_select.select("storm-client", params.version)
-      stack_select.select("storm-nimbus", params.version)
-
+      stack_select.select_packages(params.version)
 
   def start(self, env, upgrade_type=None):
     import params

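Roles that previously pointed hdp-select at several packages (the Storm Nimbus and Supervisor scripts in this part select both storm-client and the daemon-specific package) collapse to the same single call; per the commit title, the package list is presumably resolved from stack-select metadata rather than enumerated in each script. Schematically:

    # Before: each script enumerated its own packages.
    stack_select.select("storm-client", params.version)
    stack_select.select("storm-nimbus", params.version)

    # After: one call; the packages for the role come from the stack definition.
    stack_select.select_packages(params.version)
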
http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/nimbus_prod.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/nimbus_prod.py b/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/nimbus_prod.py
index 39bda4d..18d8fea 100644
--- a/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/nimbus_prod.py
+++ b/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/nimbus_prod.py
@@ -31,9 +31,6 @@ from resource_management.libraries.functions import StackFeature
 
 class Nimbus(Script):
 
-  def get_component_name(self):
-    return "storm-nimbus"
-
   def install(self, env):
     self.install_packages(env)
     self.configure(env)
@@ -50,8 +47,7 @@ class Nimbus(Script):
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
       conf_select.select(params.stack_name, "storm", params.version)
-      stack_select.select("storm-client", params.version)
-      stack_select.select("storm-nimbus", params.version)
+      stack_select.select_packages(params.version)
 
   def start(self, env, upgrade_type=None):
     import params

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/pacemaker.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/pacemaker.py b/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/pacemaker.py
index 6da204e..fc45bc1 100644
--- a/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/pacemaker.py
+++ b/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/pacemaker.py
@@ -36,9 +36,6 @@ from resource_management.libraries.functions.security_commons import build_expec
 
 class PaceMaker(Script):
 
-  def get_component_name(self):
-      return "storm-client"
-
   def install(self, env):
       self.install_packages(env)
       self.configure(env)
@@ -54,7 +51,7 @@ class PaceMaker(Script):
 
       if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
         conf_select.select(params.stack_name, "storm", params.version)
-        stack_select.select("storm-client", params.version)
+        stack_select.select_packages(params.version)
 
   def start(self, env, upgrade_type=None):
       import params

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/rest_api.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/rest_api.py b/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/rest_api.py
index 26fdb27..fd301d9 100644
--- a/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/rest_api.py
+++ b/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/rest_api.py
@@ -38,9 +38,6 @@ class StormRestApi(Script):
   In HDP 2.2, it was removed since the functionality was moved to Storm UI Server.
   """
 
-  def get_component_name(self):
-    return "storm-client"
-
   def install(self, env):
     self.install_packages(env)
     self.configure(env)

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/supervisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/supervisor.py b/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/supervisor.py
index da900f9..318bd0b 100644
--- a/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/supervisor.py
+++ b/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/supervisor.py
@@ -35,9 +35,6 @@ from resource_management.core.resources.service import Service
 
 
 class Supervisor(Script):
-  def get_component_name(self):
-    return "storm-supervisor"
-
   def install(self, env):
     self.install_packages(env)
     self.configure(env)
@@ -77,8 +74,7 @@ class SupervisorDefault(Supervisor):
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
       conf_select.select(params.stack_name, "storm", params.version)
-      stack_select.select("storm-client", params.version)
-      stack_select.select("storm-supervisor", params.version)
+      stack_select.select_packages(params.version)
 
   def start(self, env, upgrade_type=None):
     import params

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/supervisor_prod.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/supervisor_prod.py b/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/supervisor_prod.py
index d6c3545..46626bc 100644
--- a/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/supervisor_prod.py
+++ b/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/supervisor_prod.py
@@ -33,9 +33,6 @@ from resource_management.libraries.functions import StackFeature
 
 class Supervisor(Script):
 
-  def get_component_name(self):
-    return "storm-supervisor"
-
   def install(self, env):
     self.install_packages(env)
     self.configure(env)
@@ -51,8 +48,7 @@ class Supervisor(Script):
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
       conf_select.select(params.stack_name, "storm", params.version)
-      stack_select.select("storm-client", params.version)
-      stack_select.select("storm-supervisor", params.version)
+      stack_select.select_packages(params.version)
 
   def start(self, env, upgrade_type=None):
     import params

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/ui_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/ui_server.py b/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/ui_server.py
index 5d0c303..90ec64c 100644
--- a/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/ui_server.py
+++ b/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/ui_server.py
@@ -42,9 +42,6 @@ from resource_management.core.resources.service import Service
 
 class UiServer(Script):
 
-  def get_component_name(self):
-    return "storm-client"
-
   def install(self, env):
     self.install_packages(env)
     self.configure(env)
@@ -82,7 +79,7 @@ class UiServerDefault(UiServer):
     env.set_params(params)
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
       conf_select.select(params.stack_name, "storm", params.version)
-      stack_select.select("storm-client", params.version)
+      stack_select.select_packages(params.version)
 
   def link_metrics_sink_jar(self):
     import params

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/SYSTEMML/0.10.0/package/scripts/systemml_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SYSTEMML/0.10.0/package/scripts/systemml_client.py b/ambari-server/src/main/resources/common-services/SYSTEMML/0.10.0/package/scripts/systemml_client.py
index 2d45b68..8ae1447 100755
--- a/ambari-server/src/main/resources/common-services/SYSTEMML/0.10.0/package/scripts/systemml_client.py
+++ b/ambari-server/src/main/resources/common-services/SYSTEMML/0.10.0/package/scripts/systemml_client.py
@@ -26,16 +26,13 @@ from resource_management.libraries.functions import stack_select
 
 class SystemMLClient(Script):
 
-  def get_component_name(self):
-    return "systemml-client"
-
   def pre_upgrade_restart(self, env, upgrade_type=None):
     import params
 
     env.set_params(params)
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
       #conf_select.select(params.stack_name, "systemml", params.version)
-      stack_select.select("systemml-client", params.version)
+      stack_select.select_packages(params.version)
 
   def install(self, env):
     self.install_packages(env)

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/tez_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/tez_client.py b/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/tez_client.py
index 8018f0f..8a6a6d3 100644
--- a/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/tez_client.py
+++ b/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/tez_client.py
@@ -58,9 +58,6 @@ class TezClient(Script):
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class TezClientLinux(TezClient):
 
-  def get_component_name(self):
-    return "hadoop-client"
-
   def stack_upgrade_save_new_config(self, env):
     """
     Because this gets called during a Rolling Upgrade, the new tez configs have already been saved, so we must be
@@ -90,7 +87,7 @@ class TezClientLinux(TezClient):
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
       conf_select.select(params.stack_name, "tez", params.version)
       conf_select.select(params.stack_name, "hadoop", params.version)
-      stack_select.select("hadoop-client", params.version)
+      stack_select.select_packages(params.version)
 
   def install(self, env):
     import params

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/TITAN/1.0.0/package/scripts/titan_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/TITAN/1.0.0/package/scripts/titan_client.py b/ambari-server/src/main/resources/common-services/TITAN/1.0.0/package/scripts/titan_client.py
index 9bb1aad..2d0a93e 100755
--- a/ambari-server/src/main/resources/common-services/TITAN/1.0.0/package/scripts/titan_client.py
+++ b/ambari-server/src/main/resources/common-services/TITAN/1.0.0/package/scripts/titan_client.py
@@ -31,9 +31,6 @@ import titan
 from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl
 
 class TitanClient(Script):
-    def get_component_name(self):
-        return "titan-client"
-
     def configure(self, env):
         import params
         env.set_params(params)
@@ -51,7 +48,7 @@ class TitanClientLinux(TitanClient):
 
         if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
             conf_select.select(params.stack_name, "titan", params.version)
-            stack_select.select("titan-client", params.version)
+            stack_select.select_packages(params.version)
 
     def install(self, env):
         self.install_packages(env)

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/TITAN/1.0.0/package/scripts/titan_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/TITAN/1.0.0/package/scripts/titan_server.py b/ambari-server/src/main/resources/common-services/TITAN/1.0.0/package/scripts/titan_server.py
index 5dcc7e9..427448b 100755
--- a/ambari-server/src/main/resources/common-services/TITAN/1.0.0/package/scripts/titan_server.py
+++ b/ambari-server/src/main/resources/common-services/TITAN/1.0.0/package/scripts/titan_server.py
@@ -29,9 +29,6 @@ from titan_service import titan_service
 import titan
 
 class TitanServer(Script):
-  def get_component_name(self):
-    return "titan-server"
-
   def install(self, env):
     self.install_packages(env)
 
@@ -45,7 +42,7 @@ class TitanServer(Script):
     import params
     env.set_params(params)
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
-      stack_select.select("titan-server", params.version)
+      stack_select.select_packages(params.version)
       conf_select.select(params.stack_name, "titan", params.version)
 
   def start(self, env, upgrade_type=None):

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/application_timeline_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/application_timeline_server.py b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/application_timeline_server.py
index a299e25..c4f3cfe 100644
--- a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/application_timeline_server.py
+++ b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/application_timeline_server.py
@@ -63,9 +63,6 @@ class ApplicationTimelineServerWindows(ApplicationTimelineServer):
 
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class ApplicationTimelineServerDefault(ApplicationTimelineServer):
-  def get_component_name(self):
-    return "hadoop-yarn-timelineserver"
-
   def pre_upgrade_restart(self, env, upgrade_type=None):
     Logger.info("Executing Stack Upgrade pre-restart")
     import params
@@ -73,7 +70,7 @@ class ApplicationTimelineServerDefault(ApplicationTimelineServer):
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
       conf_select.select(params.stack_name, "hadoop", params.version)
-      stack_select.select("hadoop-yarn-timelineserver", params.version)
+      stack_select.select_packages(params.version)
 
   def status(self, env):
     import status_params

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/historyserver.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/historyserver.py b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/historyserver.py
index 36417a8..ff50e4e 100644
--- a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/historyserver.py
+++ b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/historyserver.py
@@ -70,9 +70,6 @@ class HistoryserverWindows(HistoryServer):
 
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class HistoryServerDefault(HistoryServer):
-  def get_component_name(self):
-    return "hadoop-mapreduce-historyserver"
-
   def pre_upgrade_restart(self, env, upgrade_type=None):
     Logger.info("Executing Stack Upgrade pre-restart")
     import params
@@ -80,7 +77,7 @@ class HistoryServerDefault(HistoryServer):
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
       conf_select.select(params.stack_name, "hadoop", params.version)
-      stack_select.select("hadoop-mapreduce-historyserver", params.version)
+      stack_select.select_packages(params.version)
       # MC Hammer said, "Can't touch this"
       copy_to_hdfs("mapreduce", params.user_group, params.hdfs_user, skip=params.sysprep_skip_copy_tarballs_hdfs)
       copy_to_hdfs("tez", params.user_group, params.hdfs_user, skip=params.sysprep_skip_copy_tarballs_hdfs)

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/mapreduce2_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/mapreduce2_client.py b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/mapreduce2_client.py
index 424157b..efcb2da 100644
--- a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/mapreduce2_client.py
+++ b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/mapreduce2_client.py
@@ -82,16 +82,13 @@ class MapReduce2ClientWindows(MapReduce2Client):
 
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class MapReduce2ClientDefault(MapReduce2Client):
-  def get_component_name(self):
-    return "hadoop-client"
-
   def pre_upgrade_restart(self, env, upgrade_type=None):
     import params
     env.set_params(params)
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
       conf_select.select(params.stack_name, "hadoop", params.version)
-      stack_select.select("hadoop-client", params.version)
+      stack_select.select_packages(params.version)
 
 
 if __name__ == "__main__":

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/nodemanager.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/nodemanager.py b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/nodemanager.py
index 7be7c36..acd0991 100644
--- a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/nodemanager.py
+++ b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/nodemanager.py
@@ -65,9 +65,6 @@ class NodemanagerWindows(Nodemanager):
 
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class NodemanagerDefault(Nodemanager):
-  def get_component_name(self):
-    return "hadoop-yarn-nodemanager"
-
   def pre_upgrade_restart(self, env, upgrade_type=None):
     Logger.info("Executing NodeManager Stack Upgrade pre-restart")
     import params
@@ -75,7 +72,7 @@ class NodemanagerDefault(Nodemanager):
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
       conf_select.select(params.stack_name, "hadoop", params.version)
-      stack_select.select("hadoop-yarn-nodemanager", params.version)
+      stack_select.select_packages(params.version)
 
   def post_upgrade_restart(self, env, upgrade_type=None):
     Logger.info("Executing NodeManager Stack Upgrade post-restart")

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/resourcemanager.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/resourcemanager.py b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/resourcemanager.py
index ecd8147..2d74aa9 100644
--- a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/resourcemanager.py
+++ b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/resourcemanager.py
@@ -105,9 +105,6 @@ class ResourcemanagerWindows(Resourcemanager):
 
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class ResourcemanagerDefault(Resourcemanager):
-  def get_component_name(self):
-    return "hadoop-yarn-resourcemanager"
-
   def pre_upgrade_restart(self, env, upgrade_type=None):
     Logger.info("Executing Stack Upgrade post-restart")
     import params
@@ -115,7 +112,7 @@ class ResourcemanagerDefault(Resourcemanager):
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
       conf_select.select(params.stack_name, "hadoop", params.version)
-      stack_select.select("hadoop-yarn-resourcemanager", params.version)
+      stack_select.select_packages(params.version)
 
   def start(self, env, upgrade_type=None):
     import params

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/yarn_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/yarn_client.py b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/yarn_client.py
index 4d65a40..d57060c 100644
--- a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/yarn_client.py
+++ b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/yarn_client.py
@@ -51,16 +51,13 @@ class YarnClientWindows(YarnClient):
 
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class YarnClientDefault(YarnClient):
-  def get_component_name(self):
-    return "hadoop-client"
-
   def pre_upgrade_restart(self, env, upgrade_type=None):
     import params
     env.set_params(params)
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
       conf_select.select(params.stack_name, "hadoop", params.version)
-      stack_select.select("hadoop-client", params.version)
+      stack_select.select_packages(params.version)
 
 
 if __name__ == "__main__":

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/scripts/master.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/scripts/master.py b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/scripts/master.py
index ba46dc8..f8f6e3d 100644
--- a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/scripts/master.py
+++ b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/scripts/master.py
@@ -40,9 +40,6 @@ from resource_management.libraries.script.script import Script
 
 class Master(Script):
 
-  def get_component_name(self):
-    return "zeppelin-server"
-
   def install(self, env):
     import params
     env.set_params(params)
@@ -255,7 +252,7 @@ class Master(Script):
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, format_stack_version(params.version)):
       conf_select.select(params.stack_name, "zeppelin", params.version)
-      stack_select.select("zeppelin-server", params.version)
+      stack_select.select_packages(params.version)
 
   def set_interpreter_settings(self, config_data):
     import params

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5/package/scripts/zookeeper_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5/package/scripts/zookeeper_client.py b/ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5/package/scripts/zookeeper_client.py
index 1d4e89c..6c60e91 100644
--- a/ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5/package/scripts/zookeeper_client.py
+++ b/ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5/package/scripts/zookeeper_client.py
@@ -55,8 +55,6 @@ class ZookeeperClient(Script):
 
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class ZookeeperClientLinux(ZookeeperClient):
-  def get_component_name(self):
-    return "zookeeper-client"
 
   def install(self, env):
     self.install_packages(env)
@@ -69,7 +67,7 @@ class ZookeeperClientLinux(ZookeeperClient):
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, format_stack_version(params.version)):
       conf_select.select(params.stack_name, "zookeeper", params.version)
-      stack_select.select("zookeeper-client", params.version)
+      stack_select.select_packages(params.version)
 
 @OsFamilyImpl(os_family=OSConst.WINSRV_FAMILY)
 class ZookeeperClientWindows(ZookeeperClient):

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5/package/scripts/zookeeper_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5/package/scripts/zookeeper_server.py b/ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5/package/scripts/zookeeper_server.py
index 0af0345..a42cfb6 100644
--- a/ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5/package/scripts/zookeeper_server.py
+++ b/ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5/package/scripts/zookeeper_server.py
@@ -64,9 +64,6 @@ class ZookeeperServer(Script):
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class ZookeeperServerLinux(ZookeeperServer):
 
-  def get_component_name(self):
-    return "zookeeper-server"
-
   def install(self, env):
     self.install_packages(env)
     self.configure(env)
@@ -78,7 +75,7 @@ class ZookeeperServerLinux(ZookeeperServer):
 
     if check_stack_feature(StackFeature.ROLLING_UPGRADE, format_stack_version(params.version)):
       conf_select.select(params.stack_name, "zookeeper", params.version)
-      stack_select.select("zookeeper-server", params.version)
+      stack_select.select_packages(params.version)
 
   def post_upgrade_restart(self, env, upgrade_type=None):
     # during an express upgrade, there is no quorum, so don't try to perform the check

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/stacks/BigInsights/4.0/properties/stack_select_packages.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.0/properties/stack_select_packages.json b/ambari-server/src/main/resources/stacks/BigInsights/4.0/properties/stack_select_packages.json
new file mode 100644
index 0000000..a91a841
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.0/properties/stack_select_packages.json
@@ -0,0 +1,775 @@
+{
+  "BigInsights": {
+    "stack-select": {
+      "ATLAS": {
+        "ATLAS_CLIENT": {
+          "STACK-SELECT-PACKAGE": "atlas-client",
+          "INSTALL": [
+            "atlas-client"
+          ],
+          "PATCH": [
+            "atlas-client"
+          ],
+          "STANDARD": [
+            "atlas-client"
+          ]
+        },
+        "ATLAS_SERVER": {
+          "STACK-SELECT-PACKAGE": "atlas-server",
+          "INSTALL": [
+            "atlas-server"
+          ],
+          "PATCH": [
+            "atlas-server"
+          ],
+          "STANDARD": [
+            "atlas-server"
+          ]
+        }
+      },
+      "FLUME": {
+        "FLUME_HANDLER": {
+          "STACK-SELECT-PACKAGE": "flume-server",
+          "INSTALL": [
+            "flume-server"
+          ],
+          "PATCH": [
+            "flume-server"
+          ],
+          "STANDARD": [
+            "flume-server"
+          ]
+        }
+      },
+      "HBASE": {
+        "HBASE_CLIENT": {
+          "STACK-SELECT-PACKAGE": "hbase-client",
+          "INSTALL": [
+            "hbase-client",
+            "hadoop-client"
+          ],
+          "PATCH": [
+            "hbase-client"
+          ],
+          "STANDARD": [
+            "hbase-client",
+            "hadoop-client"
+          ]
+        },
+        "HBASE_MASTER": {
+          "STACK-SELECT-PACKAGE": "hbase-master",
+          "INSTALL": [
+            "hbase-master"
+          ],
+          "PATCH": [
+            "hbase-master"
+          ],
+          "STANDARD": [
+            "hbase-master"
+          ]
+        },
+        "HBASE_REGIONSERVER": {
+          "STACK-SELECT-PACKAGE": "hbase-regionserver",
+          "INSTALL": [
+            "hbase-regionserver"
+          ],
+          "PATCH": [
+            "hbase-regionserver"
+          ],
+          "STANDARD": [
+            "hbase-regionserver"
+          ]
+        },
+        "HBASE_REST_SERVER": {
+          "STACK-SELECT-PACKAGE": "hbase-restserver",
+          "INSTALL": [
+            "hbase-restserver"
+          ],
+          "PATCH": [
+            "hbase-restserver"
+          ],
+          "STANDARD": [
+            "hbase-restserver"
+          ]
+        },
+
+        "PHOENIX_QUERY_SERVER": {
+          "STACK-SELECT-PACKAGE": "phoenix-server",
+          "INSTALL": [
+            "phoenix-server"
+          ],
+          "PATCH": [
+            "phoenix-server"
+          ],
+          "STANDARD": [
+            "phoenix-server"
+          ]
+        }
+      },
+      "HDFS": {
+        "DATANODE": {
+          "STACK-SELECT-PACKAGE": "hadoop-hdfs-datanode",
+          "INSTALL": [
+            "hadoop-hdfs-datanode"
+          ],
+          "PATCH": [
+            "hadoop-hdfs-datanode"
+          ],
+          "STANDARD": [
+            "hadoop-hdfs-datanode"
+          ]
+        },
+        "HDFS_CLIENT": {
+          "STACK-SELECT-PACKAGE": "hadoop-client",
+          "INSTALL": [
+            "hadoop-client"
+          ],
+          "PATCH": [
+            "hadoop-client"
+          ],
+          "STANDARD": [
+            "hadoop-client"
+          ]
+        },
+        "NAMENODE": {
+          "STACK-SELECT-PACKAGE": "hadoop-hdfs-namenode",
+          "INSTALL": [
+            "hadoop-hdfs-namenode"
+          ],
+          "PATCH": [
+            "hadoop-hdfs-namenode"
+          ],
+          "STANDARD": [
+            "hadoop-hdfs-namenode"
+          ]
+        },
+        "NFS_GATEWAY": {
+          "STACK-SELECT-PACKAGE": "hadoop-hdfs-nfs3",
+          "INSTALL": [
+            "hadoop-hdfs-nfs3"
+          ],
+          "PATCH": [
+            "hadoop-hdfs-nfs3"
+          ],
+          "STANDARD": [
+            "hadoop-hdfs-nfs3"
+          ]
+        },
+        "JOURNALNODE": {
+          "STACK-SELECT-PACKAGE": "hadoop-hdfs-journalnode",
+          "INSTALL": [
+            "hadoop-hdfs-journalnode"
+          ],
+          "PATCH": [
+            "hadoop-hdfs-journalnode"
+          ],
+          "STANDARD": [
+            "hadoop-hdfs-journalnode"
+          ]
+        },
+        "SECONDARY_NAMENODE": {
+          "STACK-SELECT-PACKAGE": "hadoop-hdfs-secondarynamenode",
+          "INSTALL": [
+            "hadoop-hdfs-secondarynamenode"
+          ],
+          "PATCH": [
+            "hadoop-hdfs-secondarynamenode"
+          ],
+          "STANDARD": [
+            "hadoop-hdfs-secondarynamenode"
+          ]
+        },
+        "ZKFC": {
+          "STACK-SELECT-PACKAGE": "hadoop-hdfs-zkfc",
+          "INSTALL": [
+            "hadoop-hdfs-zkfc"
+          ],
+          "PATCH": [
+            "hadoop-hdfs-zkfc"
+          ],
+          "STANDARD": [
+            "hadoop-hdfs-zkfc"
+          ]
+        }
+      },
+      "HIVE": {
+        "HCAT": {
+          "STACK-SELECT-PACKAGE": "hive-webhcat",
+          "INSTALL": [
+            "hive-webhcat"
+          ],
+          "PATCH": [
+            "hive-webhcat"
+          ],
+          "STANDARD": [
+            "hive-webhcat"
+          ]
+        },
+        "HIVE_METASTORE": {
+          "STACK-SELECT-PACKAGE": "hive-metastore",
+          "INSTALL": [
+            "hive-metastore"
+          ],
+          "PATCH": [
+            "hive-metastore"
+          ],
+          "STANDARD": [
+            "hive-metastore"
+          ]
+        },
+        "HIVE_SERVER": {
+          "STACK-SELECT-PACKAGE": "hive-server2",
+          "INSTALL": [
+            "hive-server2"
+          ],
+          "PATCH": [
+            "hive-server2"
+          ],
+          "STANDARD": [
+            "hive-server2"
+          ]
+        },
+        "HIVE_SERVER_INTERACTIVE": {
+          "STACK-SELECT-PACKAGE": "hive-server2-hive2",
+          "INSTALL": [
+            "hive-server2-hive2"
+          ],
+          "PATCH": [
+            "hive-server2-hive2"
+          ],
+          "STANDARD": [
+            "hive-server2-hive2"
+          ]
+        },
+        "HIVE_CLIENT": {
+          "STACK-SELECT-PACKAGE": "hadoop-client",
+          "INSTALL": [
+            "hadoop-client"
+          ],
+          "PATCH": [
+            "hadoop-client"
+          ],
+          "STANDARD": [
+            "hadoop-client"
+          ]
+        },
+        "WEBHCAT_SERVER": {
+          "STACK-SELECT-PACKAGE": "hive-webhcat",
+          "INSTALL": [
+            "hive-webhcat"
+          ],
+          "PATCH": [
+            "hive-webhcat"
+          ],
+          "STANDARD": [
+            "hive-webhcat"
+          ]
+        }
+      },
+      "KAFKA": {
+        "KAFKA_BROKER": {
+          "STACK-SELECT-PACKAGE": "kafka-broker",
+          "INSTALL": [
+            "kafka-broker"
+          ],
+          "PATCH": [
+            "kafka-broker"
+          ],
+          "STANDARD": [
+            "kafka-broker"
+          ]
+        }
+      },
+      "KNOX": {
+        "KNOX_GATEWAY": {
+          "STACK-SELECT-PACKAGE": "knox-server",
+          "INSTALL": [
+            "knox-server"
+          ],
+          "PATCH": [
+            "knox-server"
+          ],
+          "STANDARD": [
+            "knox-server"
+          ]
+        }
+      },
+      "MAPREDUCE2": {
+        "HISTORYSERVER": {
+          "STACK-SELECT-PACKAGE": "hadoop-mapreduce-historyserver",
+          "INSTALL": [
+            "hadoop-mapreduce-historyserver"
+          ],
+          "PATCH": [
+            "hadoop-mapreduce-historyserver"
+          ],
+          "STANDARD": [
+            "hadoop-mapreduce-historyserver"
+          ]
+        },
+        "MAPREDUCE2_CLIENT": {
+          "STACK-SELECT-PACKAGE": "hadoop-client",
+          "INSTALL": [
+            "hadoop-client"
+          ],
+          "PATCH": [
+            "hadoop-client"
+          ],
+          "STANDARD": [
+            "hadoop-client"
+          ]
+        }
+      },
+      "OOZIE": {
+        "OOZIE_CLIENT": {
+          "STACK-SELECT-PACKAGE": "oozie-client",
+          "INSTALL": [
+            "oozie-client"
+          ],
+          "PATCH": [
+            "oozie-client"
+          ],
+          "STANDARD": [
+            "oozie-client"
+          ]
+        },
+        "OOZIE_SERVER": {
+          "STACK-SELECT-PACKAGE": "oozie-server",
+          "INSTALL": [
+            "oozie-client",
+            "oozie-server"
+          ],
+          "PATCH": [
+            "oozie-server"
+          ],
+          "STANDARD": [
+            "oozie-client",
+            "oozie-server"
+          ]
+        }
+      },
+      "PIG": {
+        "PIG": {
+          "STACK-SELECT-PACKAGE": "hadoop-client",
+          "INSTALL": [
+            "hadoop-client"
+          ],
+          "PATCH": [
+            "hadoop-client"
+          ],
+          "STANDARD": [
+            "hadoop-client"
+          ]
+        }
+      },
+      "R4ML": {
+        "R4ML": {
+          "STACK-SELECT-PACKAGE": "r4ml-client",
+          "INSTALL": [
+            "r4ml-client"
+          ],
+          "PATCH": [
+            "r4ml-client"
+          ],
+          "STANDARD": [
+            "r4ml-client"
+          ]
+        }
+      },
+      "RANGER": {
+        "RANGER_ADMIN": {
+          "STACK-SELECT-PACKAGE": "ranger-admin",
+          "INSTALL": [
+            "ranger-admin"
+          ],
+          "PATCH": [
+            "ranger-admin"
+          ],
+          "STANDARD": [
+            "ranger-admin"
+          ]
+        },
+        "RANGER_TAGSYNC": {
+          "STACK-SELECT-PACKAGE": "ranger-tagsync",
+          "INSTALL": [
+            "ranger-tagsync"
+          ],
+          "PATCH": [
+            "ranger-tagsync"
+          ],
+          "STANDARD": [
+            "ranger-tagsync"
+          ]
+        },
+        "RANGER_USERSYNC": {
+          "STACK-SELECT-PACKAGE": "ranger-usersync",
+          "INSTALL": [
+            "ranger-usersync"
+          ],
+          "PATCH": [
+            "ranger-usersync"
+          ],
+          "STANDARD": [
+            "ranger-usersync"
+          ]
+        }
+      },
+      "RANGER_KMS": {
+        "RANGER_KMS_SERVER": {
+          "STACK-SELECT-PACKAGE": "ranger-kms",
+          "INSTALL": [
+            "ranger-kms"
+          ],
+          "PATCH": [
+            "ranger-kms"
+          ],
+          "STANDARD": [
+            "ranger-kms"
+          ]
+        }
+      },
+      "SLIDER": {
+        "SLIDER": {
+          "STACK-SELECT-PACKAGE": "slider-client",
+          "INSTALL": [
+            "slider-client",
+            "hadoop-client"
+          ],
+          "PATCH": [
+            "slider-client"
+          ],
+          "STANDARD": [
+            "slider-client",
+            "hadoop-client"
+          ]
+        }
+      },
+      "SOLR": {
+        "SOLR": {
+          "STACK-SELECT-PACKAGE": "solr-server",
+          "INSTALL": [
+            "solr-server"
+          ],
+          "PATCH": [
+            "solr-server"
+          ],
+          "STANDARD": [
+            "solr-server"
+          ]
+        }
+      },
+      "SPARK": {
+        "LIVY_SERVER": {
+          "STACK-SELECT-PACKAGE": "livy-server",
+          "INSTALL": [
+            "livy-server"
+          ],
+          "PATCH": [
+            "livy-server"
+          ],
+          "STANDARD": [
+            "livy-server"
+          ]
+        },
+        "SPARK_CLIENT": {
+          "STACK-SELECT-PACKAGE": "spark-client",
+          "INSTALL": [
+            "spark-client"
+          ],
+          "PATCH": [
+            "spark-client"
+          ],
+          "STANDARD": [
+            "spark-client"
+          ]
+        },
+        "SPARK_JOBHISTORYSERVER": {
+          "STACK-SELECT-PACKAGE": "spark-historyserver",
+          "INSTALL": [
+            "spark-historyserver"
+          ],
+          "PATCH": [
+            "spark-historyserver"
+          ],
+          "STANDARD": [
+            "spark-historyserver"
+          ]
+        },
+        "SPARK_THRIFTSERVER": {
+          "STACK-SELECT-PACKAGE": "spark-thriftserver",
+          "INSTALL": [
+            "spark-thriftserver"
+          ],
+          "PATCH": [
+            "spark-thriftserver"
+          ],
+          "STANDARD": [
+            "spark-thriftserver"
+          ]
+        }
+      },
+      "SPARK2": {
+        "LIVY2_SERVER": {
+          "STACK-SELECT-PACKAGE": "livy2-server",
+          "INSTALL": [
+            "livy2-server"
+          ],
+          "PATCH": [
+            "livy2-server"
+          ],
+          "STANDARD": [
+            "livy2-server"
+          ]
+        },
+        "SPARK2_CLIENT": {
+          "STACK-SELECT-PACKAGE": "spark2-client",
+          "INSTALL": [
+            "spark2-client"
+          ],
+          "PATCH": [
+            "spark2-client"
+          ],
+          "STANDARD": [
+            "spark2-client"
+          ]
+        },
+        "SPARK2_JOBHISTORYSERVER": {
+          "STACK-SELECT-PACKAGE": "spark2-historyserver",
+          "INSTALL": [
+            "spark2-historyserver"
+          ],
+          "PATCH": [
+            "spark2-historyserver"
+          ],
+          "STANDARD": [
+            "spark2-historyserver"
+          ]
+        },
+        "SPARK2_THRIFTSERVER": {
+          "STACK-SELECT-PACKAGE": "spark2-thriftserver",
+          "INSTALL": [
+            "spark2-thriftserver"
+          ],
+          "PATCH": [
+            "spark2-thriftserver"
+          ],
+          "STANDARD": [
+            "spark2-thriftserver"
+          ]
+        }
+      },
+      "SQOOP": {
+        "SQOOP": {
+          "STACK-SELECT-PACKAGE": "sqoop-client",
+          "INSTALL": [
+            "sqoop-client"
+          ],
+          "PATCH": [
+            "sqoop-client"
+          ],
+          "STANDARD": [
+            "sqoop-client"
+          ]
+        }
+      },
+      "STORM": {
+        "NIMBUS": {
+          "STACK-SELECT-PACKAGE": "storm-nimbus",
+          "INSTALL": [
+            "storm-client",
+            "storm-nimbus"
+          ],
+          "PATCH": [
+            "storm-client",
+            "storm-nimbus"
+          ],
+          "STANDARD": [
+            "storm-client",
+            "storm-nimbus"
+          ]
+        },
+        "SUPERVISOR": {
+          "STACK-SELECT-PACKAGE": "storm-supervisor",
+          "INSTALL": [
+            "storm-supervisor"
+          ],
+          "PATCH": [
+            "storm-supervisor"
+          ],
+          "STANDARD": [
+            "storm-client",
+            "storm-supervisor"
+          ]
+        },
+        "DRPC_SERVER": {
+          "STACK-SELECT-PACKAGE": "storm-client",
+          "INSTALL": [
+            "storm-client"
+          ],
+          "PATCH": [
+            "storm-client"
+          ],
+          "STANDARD": [
+            "storm-client"
+          ]
+        },
+        "STORM_UI_SERVER": {
+          "STACK-SELECT-PACKAGE": "storm-client",
+          "INSTALL": [
+            "storm-client"
+          ],
+          "PATCH": [
+            "storm-client"
+          ],
+          "STANDARD": [
+            "storm-client"
+          ]
+        }
+      },
+      "SYSTEMML": {
+        "SYSTEMML": {
+          "STACK-SELECT-PACKAGE": "systemml-client",
+          "INSTALL": [
+            "systemml-client"
+          ],
+          "PATCH": [
+            "systemml-client"
+          ],
+          "STANDARD": [
+            "systemml-client"
+          ]
+        }
+      },
+      "TEZ": {
+        "TEZ_CLIENT": {
+          "STACK-SELECT-PACKAGE": "hadoop-client",
+          "INSTALL": [
+            "hadoop-client"
+          ],
+          "PATCH": [
+            "hadoop-client"
+          ],
+          "STANDARD": [
+            "hadoop-client"
+          ]
+        }
+      },
+      "TITAN": {
+        "TITAN_CLIENT": {
+          "STACK-SELECT-PACKAGE": "titan-client",
+          "INSTALL": [
+            "titan-client"
+          ],
+          "PATCH": [
+            "titan-client"
+          ],
+          "STANDARD": [
+            "titan-client"
+          ]
+        },
+        "TITAN_SERVER": {
+          "STACK-SELECT-PACKAGE": "titan-server",
+          "INSTALL": [
+            "titan-server"
+          ],
+          "PATCH": [
+            "titan-server"
+          ],
+          "STANDARD": [
+            "titan-server"
+          ]
+        }
+      },
+      "YARN": {
+        "APP_TIMELINE_SERVER": {
+          "STACK-SELECT-PACKAGE": "hadoop-yarn-timelineserver",
+          "INSTALL": [
+            "hadoop-yarn-timelineserver"
+          ],
+          "PATCH": [
+            "hadoop-yarn-timelineserver"
+          ],
+          "STANDARD": [
+            "hadoop-yarn-timelineserver"
+          ]
+        },
+        "NODEMANAGER": {
+          "STACK-SELECT-PACKAGE": "hadoop-yarn-nodemanager",
+          "INSTALL": [
+            "hadoop-yarn-nodemanager"
+          ],
+          "PATCH": [
+            "hadoop-yarn-nodemanager"
+          ],
+          "STANDARD": [
+            "hadoop-yarn-nodemanager"
+          ]
+        },
+        "RESOURCEMANAGER": {
+          "STACK-SELECT-PACKAGE": "hadoop-yarn-resourcemanager",
+          "INSTALL": [
+            "hadoop-yarn-resourcemanager"
+          ],
+          "PATCH": [
+            "hadoop-yarn-resourcemanager"
+          ],
+          "STANDARD": [
+            "hadoop-yarn-resourcemanager"
+          ]
+        },
+        "YARN_CLIENT": {
+          "STACK-SELECT-PACKAGE": "hadoop-client",
+          "INSTALL": [
+            "hadoop-client"
+          ],
+          "PATCH": [
+            "hadoop-client"
+          ],
+          "STANDARD": [
+            "hadoop-client"
+          ]
+        }
+      },
+      "ZEPPELIN": {
+        "ZEPPELIN_MASTER": {
+          "STACK-SELECT-PACKAGE": "zeppelin-server",
+          "INSTALL": [
+            "zeppelin-server"
+          ],
+          "PATCH": [
+            "zeppelin-server"
+          ],
+          "STANDARD": [
+            "zeppelin-server"
+          ]
+        }
+      },
+      "ZOOKEEPER": {
+        "ZOOKEEPER_CLIENT": {
+          "STACK-SELECT-PACKAGE": "zookeeper-client",
+          "INSTALL": [
+            "zookeeper-client"
+          ],
+          "PATCH": [
+            "zookeeper-client"
+          ],
+          "STANDARD": [
+            "zookeeper-client"
+          ]
+        },
+        "ZOOKEEPER_SERVER": {
+          "STACK-SELECT-PACKAGE": "zookeeper-server",
+          "INSTALL": [
+            "zookeeper-server"
+          ],
+          "PATCH": [
+            "zookeeper-server"
+          ],
+          "STANDARD": [
+            "zookeeper-server"
+          ]
+        }
+      }
+    }
+  }
+}
\ No newline at end of file

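The new stack_select_packages.json keys everything by stack name, then service, then component, with INSTALL/PATCH/STANDARD package lists per upgrade scenario plus a STACK-SELECT-PACKAGE entry naming the component's primary stack-select package. A minimal lookup sketch against the layout above (illustrative only; the real consumer is resource_management.libraries.functions.stack_select, and the local file path here is an assumption):

import json

def packages_for(mapping_path, stack, service, component, scenario="STANDARD"):
    """Return the stack-select packages declared for a component under the
    given scenario (one of INSTALL, PATCH, STANDARD)."""
    with open(mapping_path) as fp:
        mapping = json.load(fp)
    return mapping[stack]["stack-select"][service][component][scenario]

# Against the file above this prints ['hbase-client', 'hadoop-client'].
print(packages_for("stack_select_packages.json", "BigInsights", "HBASE", "HBASE_CLIENT"))
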
http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/FLUME/package/scripts/flume_handler.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/FLUME/package/scripts/flume_handler.py b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/FLUME/package/scripts/flume_handler.py
index d0c6e3b..159278a 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/FLUME/package/scripts/flume_handler.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/FLUME/package/scripts/flume_handler.py
@@ -34,9 +34,6 @@ from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl
 
 class FlumeHandler(Script):
 
-  def get_component_name(self):
-    return "flume-server"
-
   def install(self, env):
     import params
     self.install_packages(env)
@@ -137,7 +134,7 @@ class FlumeHandler(Script):
 
     Logger.info("Executing Flume Stack Upgrade pre-restart")
     conf_select.select(params.stack_name, "flume", params.version)
-    stack_select.select("flume-server", params.version)
+    stack_select.select_packages(params.version)
     if params.upgrade_direction == Direction.UPGRADE:
       flume_upgrade.pre_start_restore()
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/HBASE/package/scripts/hbase_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/HBASE/package/scripts/hbase_client.py b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/HBASE/package/scripts/hbase_client.py
index 3ed6103..df382ce 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/HBASE/package/scripts/hbase_client.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/HBASE/package/scripts/hbase_client.py
@@ -28,23 +28,19 @@ from hbase import hbase
 
 class HbaseClient(Script):
 
-  def get_component_name(self):
-    return "hbase-client"
-
   def pre_upgrade_restart(self, env, upgrade_type=None):
     import params
     env.set_params(params)
 
     if params.version and compare_versions(format_stack_version(params.version), '4.0.0.0') >= 0:
       conf_select.select(params.stack_name, "hbase", params.version)
-      stack_select.select("hbase-client", params.version)
+      stack_select.select_packages(params.version)
       #Execute(format("stack-select set hbase-client {version}"))
 
       # set all of the hadoop clients since hbase client is upgraded as part
       # of the final "CLIENTS" group and we need to ensure that hadoop-client
       # is also set
       conf_select.select(params.stack_name, "hadoop", params.version)
-      stack_select.select("hadoop-client", params.version)
       #Execute(format("stack-select set hadoop-client {version}"))
 
   def install(self, env):

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/HBASE/package/scripts/hbase_master.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/HBASE/package/scripts/hbase_master.py b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/HBASE/package/scripts/hbase_master.py
index b1b6cc4..b20180e 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/HBASE/package/scripts/hbase_master.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/HBASE/package/scripts/hbase_master.py
@@ -29,10 +29,6 @@ from hbase_decommission import hbase_decommission
 import upgrade
 
 class HbaseMaster(Script):
-
-  def get_component_name(self):
-    return "hbase-master"
-
   def install(self, env):
     self.install_packages(env)
 
@@ -45,7 +41,7 @@ class HbaseMaster(Script):
   def pre_upgrade_restart(self, env, upgrade_type=None):
     import params
     env.set_params(params)
-    upgrade.prestart(env, "hbase-master")
+    upgrade.prestart(env)
 
   def start(self, env, upgrade_type=None):
     import params

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/HBASE/package/scripts/hbase_regionserver.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/HBASE/package/scripts/hbase_regionserver.py b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/HBASE/package/scripts/hbase_regionserver.py
index 696b173..48f0502 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/HBASE/package/scripts/hbase_regionserver.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/HBASE/package/scripts/hbase_regionserver.py
@@ -30,9 +30,6 @@ import upgrade
 
 class HbaseRegionServer(Script):
 
-  def get_component_name(self):
-    return "hbase-regionserver"
-
   def install(self, env):
     self.install_packages(env)
 
@@ -45,7 +42,7 @@ class HbaseRegionServer(Script):
   def pre_upgrade_restart(self, env, upgrade_type=None):
     import params
     env.set_params(params)
-    upgrade.prestart(env, "hbase-regionserver")
+    upgrade.prestart(env)
 
   def post_upgrade_restart(self, env, upgrade_type=None):
     import params

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/HBASE/package/scripts/hbase_restgatewayserver.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/HBASE/package/scripts/hbase_restgatewayserver.py b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/HBASE/package/scripts/hbase_restgatewayserver.py
index f8dc53d..6280e9a 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/HBASE/package/scripts/hbase_restgatewayserver.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/HBASE/package/scripts/hbase_restgatewayserver.py
@@ -29,9 +29,6 @@ import upgrade
 
 
 class HbaseRestGatewayServer(Script):
-  def get_component_name(self):
-    return "hbase-restserver"
-
   def install(self, env):
     self.install_packages(env)
 
@@ -44,7 +41,7 @@ class HbaseRestGatewayServer(Script):
   def pre_upgrade_restart(self, env, upgrade_type=None):
     import params
     env.set_params(params)
-    upgrade.prestart(env, "hbase-restserver")
+    upgrade.prestart(env)
 
   def post_upgrade_restart(self, env, upgrade_type=None):
     import params

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/HBASE/package/scripts/upgrade.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/HBASE/package/scripts/upgrade.py b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/HBASE/package/scripts/upgrade.py
index 11b770d..f1024c5 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/HBASE/package/scripts/upgrade.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/HBASE/package/scripts/upgrade.py
@@ -26,12 +26,12 @@ from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.version import compare_versions, format_stack_version
 from resource_management.libraries.functions.decorator import retry
 
-def prestart(env, component):
+def prestart(env):
   import params
 
   if params.version and compare_versions(format_stack_version(params.version), '4.0.0.0') >= 0:
     conf_select.select(params.stack_name, "hbase", params.version)
-    stack_select.select(component, params.version)
+    stack_select.select_packages(params.version)
 
 def post_regionserver(env):
   import params

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/HDFS/package/scripts/datanode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/HDFS/package/scripts/datanode.py b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/HDFS/package/scripts/datanode.py
index 5d1e18b..b1f1d00 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/HDFS/package/scripts/datanode.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/HDFS/package/scripts/datanode.py
@@ -61,16 +61,13 @@ class DataNode(Script):
     env.set_params(status_params)
     check_process_status(status_params.datanode_pid_file)
 
-  def get_component_name(self):
-    return "hadoop-hdfs-datanode"
-
   def pre_upgrade_restart(self, env, upgrade_type=None):
     Logger.info("Executing DataNode Stack Upgrade pre-restart")
     import params
     env.set_params(params)
     if params.version and compare_versions(format_stack_version(params.version), '4.0.0.0') >= 0:
       conf_select.select(params.stack_name, "hadoop", params.version)
-      stack_select.select("hadoop-hdfs-datanode", params.version)
+      stack_select.select_packages(params.version)
       #Execute(format("stack-select set hadoop-hdfs-datanode {version}"))
 
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/HDFS/package/scripts/hdfs_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/HDFS/package/scripts/hdfs_client.py b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/HDFS/package/scripts/hdfs_client.py
index 39cd1f4..851afd6 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/HDFS/package/scripts/hdfs_client.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/HDFS/package/scripts/hdfs_client.py
@@ -29,9 +29,6 @@ from utils import service
 
 class HdfsClient(Script):
 
-  def get_component_name(self):
-    return "hadoop-client"
-
   def install(self, env):
     import params
 
@@ -44,7 +41,7 @@ class HdfsClient(Script):
     env.set_params(params)
     if params.version and compare_versions(format_stack_version(params.version), '4.0.0.0') >= 0:
       conf_select.select(params.stack_name, "hadoop", params.version)
-      stack_select.select("hadoop-client", params.version)
+      stack_select.select_packages(params.version)
 
   def start(self, env, upgrade_type=False):
     import params

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/HDFS/package/scripts/journalnode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/HDFS/package/scripts/journalnode.py b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/HDFS/package/scripts/journalnode.py
index 1bbbd50..afc54be 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/HDFS/package/scripts/journalnode.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/HDFS/package/scripts/journalnode.py
@@ -34,9 +34,6 @@ import journalnode_upgrade
 
 class JournalNode(Script):
 
-  def get_component_name(self):
-    return "hadoop-hdfs-journalnode"
-
   def install(self, env):
     import params
 
@@ -50,7 +47,7 @@ class JournalNode(Script):
 
     if params.version and compare_versions(format_stack_version(params.version), '4.0.0.0') >= 0:
       conf_select.select(params.stack_name, "hadoop", params.version)
-      stack_select.select("hadoop-hdfs-journalnode", params.version)
+      stack_select.select_packages(params.version)
       #Execute(format("stack-select set hadoop-hdfs-journalnode {version}"))
 
   def start(self, env, upgrade_type=None):

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/HDFS/package/scripts/namenode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/HDFS/package/scripts/namenode.py b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/HDFS/package/scripts/namenode.py
index 46bd926..38ad927 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/HDFS/package/scripts/namenode.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/HDFS/package/scripts/namenode.py
@@ -55,9 +55,6 @@ except ImportError:
 
 class NameNode(Script):
 
-  def get_component_name(self):
-    return "hadoop-hdfs-namenode"
-
   def install(self, env):
     import params
 
@@ -88,7 +85,7 @@ class NameNode(Script):
 
     if params.version and compare_versions(format_stack_version(params.version), '4.0.0.0') >= 0:
       conf_select.select(params.stack_name, "hadoop", params.version)
-      stack_select.select("hadoop-hdfs-namenode", params.version)
+      stack_select.select_packages(params.version)
       #Execute(format("stack-select set hadoop-hdfs-namenode {version}"))
 
   def start(self, env, upgrade_type=None):

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/HDFS/package/scripts/nfsgateway.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/HDFS/package/scripts/nfsgateway.py b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/HDFS/package/scripts/nfsgateway.py
index ff4778a..8edd037 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/HDFS/package/scripts/nfsgateway.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/HDFS/package/scripts/nfsgateway.py
@@ -31,9 +31,6 @@ from resource_management.libraries.functions.version import compare_versions, fo
 
 class NFSGateway(Script):
 
-  def get_component_name(self):
-    return "hadoop-hdfs-nfs3"
-
   def install(self, env):
     import params
 
@@ -47,7 +44,7 @@ class NFSGateway(Script):
 
     if Script.is_stack_greater_or_equal('4.1.0.0'):
       conf_select.select(params.stack_name, "hadoop", params.version)
-      stack_select.select("hadoop-hdfs-nfs3", params.version)
+      stack_select.select_packages(params.version)
 
   def start(self, env, upgrade_type=None):
     import params

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/HDFS/package/scripts/snamenode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/HDFS/package/scripts/snamenode.py b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/HDFS/package/scripts/snamenode.py
index 4224a9e..1fe89a2 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/HDFS/package/scripts/snamenode.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/HDFS/package/scripts/snamenode.py
@@ -31,9 +31,6 @@ from hdfs import hdfs
 
 class SNameNode(Script):
 
-  def get_component_name(self):
-    return "hadoop-hdfs-secondarynamenode"
-
   def install(self, env):
     import params
 
@@ -48,7 +45,7 @@ class SNameNode(Script):
 
     if params.version and compare_versions(format_stack_version(params.version), '4.0.0.0') >= 0:
       conf_select.select(params.stack_name, "hadoop", params.version)
-      stack_select.select("hadoop-hdfs-secondarynamenode", params.version)
+      stack_select.select_packages(params.version)
 
   def start(self, env, upgrade_type=None):
     import params

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/HIVE/package/scripts/hcat_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/HIVE/package/scripts/hcat_client.py b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/HIVE/package/scripts/hcat_client.py
index e914cf6..8ae64b9 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/HIVE/package/scripts/hcat_client.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/HIVE/package/scripts/hcat_client.py
@@ -42,9 +42,7 @@ class HCatClient(Script):
 
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class HCatClientDefault(HCatClient):
-  def get_component_name(self):
-    return "hadoop-client"
-
+  pass
 
 if __name__ == "__main__":
   HCatClient().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/HIVE/package/scripts/hive_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/HIVE/package/scripts/hive_client.py b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/HIVE/package/scripts/hive_client.py
index 8d586da..5ec624c 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/HIVE/package/scripts/hive_client.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/HIVE/package/scripts/hive_client.py
@@ -42,9 +42,6 @@ class HiveClient(Script):
 
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class HiveClientDefault(HiveClient):
-  def get_component_name(self):
-    return "hadoop-client"
-
   def pre_upgrade_restart(self, env, upgrade_type=None):
     import params
     env.set_params(params)
@@ -52,7 +49,7 @@ class HiveClientDefault(HiveClient):
     if params.version and compare_versions(format_stack_version(params.version), '4.1.0.0') >= 0:
       conf_select.select(params.stack_name, "hive", params.version)
       conf_select.select(params.stack_name, "hadoop", params.version)
-      stack_select.select("hadoop-client", params.version)
+      stack_select.select_packages(params.version)
 
   def pre_upgrade_restart(self, env, upgrade_type=None):
     """
@@ -75,7 +72,7 @@ class HiveClientDefault(HiveClient):
     # HCat client doesn't have a first-class entry in hdp-select. Since clients always
     # update after daemons, this ensures that the hcat directories are correct on hosts
     # which do not include the WebHCat daemon
-    stack_select.select("hive-webhcat", params.version)
+    stack_select.select_packages(params.version)
 
 if __name__ == "__main__":
   HiveClient().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/HIVE/package/scripts/hive_metastore.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/HIVE/package/scripts/hive_metastore.py b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/HIVE/package/scripts/hive_metastore.py
index 0a9a066..8fcd4e4 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/HIVE/package/scripts/hive_metastore.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/HIVE/package/scripts/hive_metastore.py
@@ -67,9 +67,6 @@ class HiveMetastore(Script):
 
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class HiveMetastoreDefault(HiveMetastore):
-  def get_component_name(self):
-    return "hive-metastore"
-
   def status(self, env):
     import status_params
     from resource_management.libraries.functions import check_process_status
@@ -89,7 +86,7 @@ class HiveMetastoreDefault(HiveMetastore):
 
     if params.version and compare_versions(format_stack_version(params.version), '4.0.0.0') >= 0:
       conf_select.select(params.stack_name, "hive", params.version)
-      stack_select.select("hive-metastore", params.version)
+      stack_select.select_packages(params.version)
 
   def security_status(self, env):
     import status_params

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/HIVE/package/scripts/hive_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/HIVE/package/scripts/hive_server.py b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/HIVE/package/scripts/hive_server.py
index 5d2dc9f..269e8978 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/HIVE/package/scripts/hive_server.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/HIVE/package/scripts/hive_server.py
@@ -56,9 +56,6 @@ class HiveServer(Script):
 
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class HiveServerDefault(HiveServer):
-  def get_component_name(self):
-    return "hive-server2"
-
   def start(self, env, upgrade_type=None):
     import params
     env.set_params(params)
@@ -95,7 +92,7 @@ class HiveServerDefault(HiveServer):
 
     if params.version and compare_versions(format_stack_version(params.version), '4.0.0.0') >= 0:
       conf_select.select(params.stack_name, "hive", params.version)
-      stack_select.select("hive-server2", params.version)
+      stack_select.select_packages(params.version)
       #Execute(format("stack-select set hive-server2 {version}"))
       resource_created = copy_to_hdfs("mapreduce", params.user_group, params.hdfs_user)
       if resource_created:

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/HIVE/package/scripts/webhcat_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/HIVE/package/scripts/webhcat_server.py b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/HIVE/package/scripts/webhcat_server.py
index 7496649..627c96e 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/HIVE/package/scripts/webhcat_server.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/HIVE/package/scripts/webhcat_server.py
@@ -54,9 +54,6 @@ class WebHCatServer(Script):
 
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class WebHCatServerDefault(WebHCatServer):
-  def get_component_name(self):
-    return "hive-webhcat"
-
   def status(self, env):
     import status_params
     env.set_params(status_params)
@@ -70,7 +67,7 @@ class WebHCatServerDefault(WebHCatServer):
     if params.version and compare_versions(format_stack_version(params.version), '4.1.0.0') >= 0:
       # webhcat has no conf, but uses hadoop home, so verify that regular hadoop conf is set
       conf_select.select(params.stack_name, "hadoop", params.version)
-      stack_select.select("hive-webhcat", params.version)
+      stack_select.select_packages(params.version)
 
   def security_status(self, env):
     import status_params

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/KAFKA/package/scripts/kafka_broker.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/KAFKA/package/scripts/kafka_broker.py b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/KAFKA/package/scripts/kafka_broker.py
index 8799658..9fad6a7 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/KAFKA/package/scripts/kafka_broker.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/KAFKA/package/scripts/kafka_broker.py
@@ -36,9 +36,6 @@ from kafka import kafka
 
 class KafkaBroker(Script):
 
-  def get_component_name(self):
-    return "kafka-broker"
-
   def install(self, env):
     self.install_packages(env)
 
@@ -57,7 +54,7 @@ class KafkaBroker(Script):
     env.set_params(params)
 
     if params.version and compare_versions(format_stack_version(params.version), '4.1.0.0') >= 0:
-      stack_select.select("kafka-broker", params.version)
+      stack_select.select_packages(params.version)
 
     if params.version and compare_versions(format_stack_version(params.version), '4.1.0.0') >= 0:
       conf_select.select(params.stack_name, "kafka", params.version)

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/KAFKA/package/scripts/upgrade.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/KAFKA/package/scripts/upgrade.py b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/KAFKA/package/scripts/upgrade.py
index 16fc526..5628297 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/KAFKA/package/scripts/upgrade.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/KAFKA/package/scripts/upgrade.py
@@ -80,9 +80,9 @@ def run_migration(env, upgrade_type):
       Logger.info("Did not find Kafka acls script: {0}".format(kafka_acls_script))
 
 
-def prestart(env, component):
+def prestart(env):
   import params
 
   if params.version and compare_versions(format_stack_version(params.version), '4.1.0.0') >= 0:
     conf_select.select(params.stack_name, "kafka", params.version)
-    stack_select.select(component, params.version)
+    stack_select.select_packages(params.version)

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/KNOX/package/scripts/knox_gateway.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/KNOX/package/scripts/knox_gateway.py b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/KNOX/package/scripts/knox_gateway.py
index 6cc97ac..99e0086 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/KNOX/package/scripts/knox_gateway.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/KNOX/package/scripts/knox_gateway.py
@@ -43,10 +43,6 @@ from knox_ldap import ldap
 
 class KnoxGateway(Script):
 
-
-  def get_component_name(self):
-    return "knox-server"
-
   def install(self, env):
     self.install_packages(env)
     import params
@@ -115,7 +111,7 @@ class KnoxGateway(Script):
 
       # conf-select will change the symlink to the conf folder.
       conf_select.select(params.stack_name, "knox", params.version)
-      stack_select.select("knox-server", params.version)
+      stack_select.select_packages(params.version)
 
       # Extract the tar of the old conf folder into the new conf directory
       if absolute_backup_dir is not None and params.upgrade_direction and params.upgrade_direction == Direction.UPGRADE:

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/OOZIE/package/scripts/oozie_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/OOZIE/package/scripts/oozie_client.py b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/OOZIE/package/scripts/oozie_client.py
index b36d2ec..a7a205b 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/OOZIE/package/scripts/oozie_client.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/OOZIE/package/scripts/oozie_client.py
@@ -29,9 +29,6 @@ from oozie_service import oozie_service
 
 class OozieClient(Script):
 
-  def get_component_name(self):
-    return "oozie-client"
-
   def install(self, env):
     self.install_packages(env)
     self.configure(env)
@@ -58,7 +55,7 @@ class OozieClient(Script):
 
     Logger.info("Executing Oozie Client Rolling Upgrade pre-restart")
     conf_select.select(params.stack_name, "oozie", params.version)
-    stack_select.select("oozie-client", params.version)
+    stack_select.select_packages(params.version)
     #Execute(format("stack-select set oozie-client {version}"))
 
   # We substitute some configs (oozie.authentication.kerberos.principal) before generation (see oozie.py and params.py).


[5/5] ambari git commit: AMBARI-21580 - Replace Hard Coded stack-select Structures (jonathanhurley)

Posted by jo...@apache.org.
AMBARI-21580 - Replace Hard Coded stack-select Structures (jonathanhurley)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/56d2ade2
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/56d2ade2
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/56d2ade2

Branch: refs/heads/branch-feature-AMBARI-21450
Commit: 56d2ade2b78adf170a92a99dbbd8d5d1a074389a
Parents: 863343a
Author: Jonathan Hurley <jh...@hortonworks.com>
Authored: Wed Jul 26 15:01:33 2017 -0400
Committer: Jonathan Hurley <jh...@hortonworks.com>
Committed: Thu Jul 27 15:29:07 2017 -0400

----------------------------------------------------------------------
 .../libraries/functions/stack_select.py         | 171 +++-
 .../libraries/script/script.py                  |  23 +-
 .../BlueprintConfigurationProcessor.java        |   5 +-
 .../ambari/server/state/ConfigHelper.java       |   1 +
 .../server/upgrade/FinalUpgradeCatalog.java     |   4 +-
 .../package/scripts/accumulo_client.py          |   5 +-
 .../package/scripts/accumulo_script.py          |  34 +-
 .../0.1.0.2.3/package/scripts/atlas_client.py   |   5 +-
 .../package/scripts/metadata_server.py          |   6 +-
 .../DRUID/0.9.2/package/scripts/druid_node.py   |   6 +-
 .../DRUID/0.9.2/package/scripts/superset.py     |   5 +-
 .../0.5.0.2.1/package/scripts/falcon_client.py  |   5 +-
 .../0.5.0.2.1/package/scripts/falcon_server.py  |   5 +-
 .../1.4.0.2.0/package/scripts/flume_handler.py  |   5 +-
 .../0.96.0.2.0/package/scripts/hbase_client.py  |   7 +-
 .../0.96.0.2.0/package/scripts/hbase_master.py  |   5 +-
 .../package/scripts/hbase_regionserver.py       |   5 +-
 .../package/scripts/phoenix_queryserver.py      |   6 +-
 .../HBASE/0.96.0.2.0/package/scripts/upgrade.py |   4 +-
 .../HDFS/2.1.0.2.0/package/scripts/datanode.py  |   7 +-
 .../2.1.0.2.0/package/scripts/hdfs_client.py    |   5 +-
 .../2.1.0.2.0/package/scripts/journalnode.py    |   5 +-
 .../HDFS/2.1.0.2.0/package/scripts/namenode.py  |   7 +-
 .../2.1.0.2.0/package/scripts/nfsgateway.py     |   6 +-
 .../HDFS/2.1.0.2.0/package/scripts/snamenode.py |   5 +-
 .../2.1.0.2.0/package/scripts/zkfc_slave.py     |   9 +-
 .../0.12.0.2.0/package/scripts/hcat_client.py   |   8 +-
 .../0.12.0.2.0/package/scripts/hive_client.py   |   5 +-
 .../package/scripts/hive_metastore.py           |   6 +-
 .../0.12.0.2.0/package/scripts/hive_server.py   |   5 +-
 .../package/scripts/hive_server_interactive.py  |   6 +-
 .../package/scripts/webhcat_server.py           |   5 +-
 .../KAFKA/0.8.1/package/scripts/kafka_broker.py |   5 +-
 .../0.5.0.2.2/package/scripts/knox_gateway.py   |   5 +-
 .../1.0.0.2.3/package/scripts/mahout_client.py  |   7 +-
 .../4.0.0.2.0/package/scripts/oozie_client.py   |   5 +-
 .../4.0.0.2.0/package/scripts/oozie_server.py   |   8 +-
 .../0.12.0.2.0/package/scripts/pig_client.py    |   5 +-
 .../R4ML/0.8.0/package/scripts/r4ml_client.py   |   5 +-
 .../0.4.0/package/scripts/ranger_admin.py       |   5 +-
 .../0.4.0/package/scripts/ranger_tagsync.py     |   7 +-
 .../0.4.0/package/scripts/ranger_usersync.py    |   3 -
 .../RANGER/0.4.0/package/scripts/upgrade.py     |   2 +-
 .../0.5.0.2.3/package/scripts/kms_server.py     |   3 -
 .../0.5.0.2.3/package/scripts/upgrade.py        |   2 +-
 .../0.60.0.2.2/package/scripts/slider_client.py |   6 +-
 .../1.2.1/package/scripts/job_history_server.py |   5 +-
 .../SPARK/1.2.1/package/scripts/livy_server.py  |   5 +-
 .../SPARK/1.2.1/package/scripts/spark_client.py |   5 +-
 .../package/scripts/spark_thrift_server.py      |   5 +-
 .../2.0.0/package/scripts/job_history_server.py |   5 +-
 .../2.0.0/package/scripts/livy2_server.py       |   5 +-
 .../2.0.0/package/scripts/spark_client.py       |   5 +-
 .../package/scripts/spark_thrift_server.py      |   5 +-
 .../1.4.4.2.0/package/scripts/service_check.py  |   3 -
 .../1.4.4.2.0/package/scripts/sqoop_client.py   |   5 +-
 .../STORM/0.9.1/package/scripts/drpc_server.py  |   5 +-
 .../STORM/0.9.1/package/scripts/nimbus.py       |   7 +-
 .../STORM/0.9.1/package/scripts/nimbus_prod.py  |   6 +-
 .../STORM/0.9.1/package/scripts/pacemaker.py    |   5 +-
 .../STORM/0.9.1/package/scripts/rest_api.py     |   3 -
 .../STORM/0.9.1/package/scripts/supervisor.py   |   6 +-
 .../0.9.1/package/scripts/supervisor_prod.py    |   6 +-
 .../STORM/0.9.1/package/scripts/ui_server.py    |   5 +-
 .../0.10.0/package/scripts/systemml_client.py   |   5 +-
 .../TEZ/0.4.0.2.1/package/scripts/tez_client.py |   5 +-
 .../TITAN/1.0.0/package/scripts/titan_client.py |   5 +-
 .../TITAN/1.0.0/package/scripts/titan_server.py |   5 +-
 .../scripts/application_timeline_server.py      |   5 +-
 .../2.1.0.2.0/package/scripts/historyserver.py  |   5 +-
 .../package/scripts/mapreduce2_client.py        |   5 +-
 .../2.1.0.2.0/package/scripts/nodemanager.py    |   5 +-
 .../package/scripts/resourcemanager.py          |   5 +-
 .../2.1.0.2.0/package/scripts/yarn_client.py    |   5 +-
 .../0.6.0.2.5/package/scripts/master.py         |   5 +-
 .../3.4.5/package/scripts/zookeeper_client.py   |   4 +-
 .../3.4.5/package/scripts/zookeeper_server.py   |   5 +-
 .../4.0/properties/stack_select_packages.json   | 775 +++++++++++++++
 .../FLUME/package/scripts/flume_handler.py      |   5 +-
 .../HBASE/package/scripts/hbase_client.py       |   6 +-
 .../HBASE/package/scripts/hbase_master.py       |   6 +-
 .../HBASE/package/scripts/hbase_regionserver.py |   5 +-
 .../package/scripts/hbase_restgatewayserver.py  |   5 +-
 .../services/HBASE/package/scripts/upgrade.py   |   4 +-
 .../services/HDFS/package/scripts/datanode.py   |   5 +-
 .../HDFS/package/scripts/hdfs_client.py         |   5 +-
 .../HDFS/package/scripts/journalnode.py         |   5 +-
 .../services/HDFS/package/scripts/namenode.py   |   5 +-
 .../services/HDFS/package/scripts/nfsgateway.py |   5 +-
 .../services/HDFS/package/scripts/snamenode.py  |   5 +-
 .../HIVE/package/scripts/hcat_client.py         |   4 +-
 .../HIVE/package/scripts/hive_client.py         |   7 +-
 .../HIVE/package/scripts/hive_metastore.py      |   5 +-
 .../HIVE/package/scripts/hive_server.py         |   5 +-
 .../HIVE/package/scripts/webhcat_server.py      |   5 +-
 .../KAFKA/package/scripts/kafka_broker.py       |   5 +-
 .../services/KAFKA/package/scripts/upgrade.py   |   4 +-
 .../KNOX/package/scripts/knox_gateway.py        |   6 +-
 .../OOZIE/package/scripts/oozie_client.py       |   5 +-
 .../OOZIE/package/scripts/oozie_server.py       |   8 +-
 .../services/PIG/package/scripts/pig_client.py  |   5 +-
 .../SLIDER/package/scripts/slider_client.py     |   6 +-
 .../SOLR/package/scripts/solr_server.py         |   2 +-
 .../SPARK/package/scripts/job_history_server.py |   5 +-
 .../SPARK/package/scripts/spark_client.py       |   5 +-
 .../package/scripts/spark_thrift_server.py      |   6 +-
 .../SQOOP/package/scripts/service_check.py      |   3 -
 .../SQOOP/package/scripts/sqoop_client.py       |   5 +-
 .../scripts/application_timeline_server.py      |   5 +-
 .../YARN/package/scripts/historyserver.py       |   5 +-
 .../YARN/package/scripts/mapreduce2_client.py   |   5 +-
 .../YARN/package/scripts/nodemanager.py         |   5 +-
 .../YARN/package/scripts/resourcemanager.py     |   5 +-
 .../YARN/package/scripts/yarn_client.py         |   5 +-
 .../ZOOKEEPER/package/scripts/zookeeper.py      |   2 +-
 .../package/scripts/zookeeper_client.py         |   5 +-
 .../package/scripts/zookeeper_server.py         |   5 +-
 .../scripts/shared_initialization.py            |  19 +-
 .../HBASE/package/scripts/hbase_client.py       |   8 +-
 .../HBASE/package/scripts/hbase_master.py       |   5 +-
 .../HBASE/package/scripts/hbase_regionserver.py |   5 +-
 .../package/scripts/hbase_restgatewayserver.py  |   5 +-
 .../package/scripts/phoenix_queryserver.py      |   6 +-
 .../services/HBASE/package/scripts/upgrade.py   |   4 +-
 .../FLUME/package/scripts/flume_handler.py      |   5 +-
 .../HBASE/package/scripts/hbase_client.py       |   7 +-
 .../HBASE/package/scripts/hbase_master.py       |   5 +-
 .../HBASE/package/scripts/hbase_regionserver.py |   5 +-
 .../package/scripts/hbase_restgatewayserver.py  |   5 +-
 .../package/scripts/phoenix_queryserver.py      |   7 +-
 .../services/HBASE/package/scripts/upgrade.py   |   4 +-
 .../services/HDFS/package/scripts/datanode.py   |   7 +-
 .../HDFS/package/scripts/hdfs_client.py         |   5 +-
 .../HDFS/package/scripts/journalnode.py         |   5 +-
 .../services/HDFS/package/scripts/namenode.py   |   7 +-
 .../services/HDFS/package/scripts/nfsgateway.py |   5 +-
 .../services/HDFS/package/scripts/snamenode.py  |   5 +-
 .../HIVE/package/scripts/hcat_client.py         |   4 +-
 .../HIVE/package/scripts/hive_client.py         |   7 +-
 .../HIVE/package/scripts/hive_metastore.py      |   5 +-
 .../HIVE/package/scripts/hive_server.py         |   5 +-
 .../HIVE/package/scripts/webhcat_server.py      |   5 +-
 .../KAFKA/package/scripts/kafka_broker.py       |   5 +-
 .../KNOX/package/scripts/knox_gateway.py        |   7 +-
 .../OOZIE/package/scripts/oozie_client.py       |   5 +-
 .../OOZIE/package/scripts/oozie_server.py       |  10 +-
 .../services/PIG/package/scripts/pig_client.py  |   5 +-
 .../RANGER/package/scripts/ranger_admin.py      |   5 +-
 .../RANGER/package/scripts/ranger_usersync.py   |   4 -
 .../services/RANGER/package/scripts/upgrade.py  |   2 +-
 .../RANGER_KMS/package/scripts/kms_server.py    |   3 -
 .../RANGER_KMS/package/scripts/upgrade.py       |   2 +-
 .../SLIDER/package/scripts/slider_client.py     |   6 +-
 .../SOLR/package/scripts/solr_server.py         |   2 +-
 .../SPARK/package/scripts/job_history_server.py |   5 +-
 .../SPARK/package/scripts/spark_client.py       |   5 +-
 .../package/scripts/spark_thrift_server.py      |   6 +-
 .../SQOOP/package/scripts/service_check.py      |   3 -
 .../SQOOP/package/scripts/sqoop_client.py       |   5 +-
 .../SYSTEMML/package/scripts/systemml_client.py |   5 +-
 .../TITAN/package/scripts/titan_client.py       |   5 +-
 .../scripts/application_timeline_server.py      |   5 +-
 .../YARN/package/scripts/historyserver.py       |   5 +-
 .../YARN/package/scripts/mapreduce2_client.py   |   5 +-
 .../YARN/package/scripts/nodemanager.py         |   5 +-
 .../YARN/package/scripts/resourcemanager.py     |   5 +-
 .../YARN/package/scripts/yarn_client.py         |   5 +-
 .../ZOOKEEPER/package/scripts/zookeeper.py      |   2 +-
 .../package/scripts/zookeeper_client.py         |   5 +-
 .../package/scripts/zookeeper_server.py         |   5 +-
 .../HDP/2.0.6/configuration/cluster-env.xml     |  15 +
 .../2.0.6/hooks/after-INSTALL/scripts/hook.py   |   2 +-
 .../2.0.6/hooks/after-INSTALL/scripts/params.py |   3 -
 .../scripts/shared_initialization.py            |  39 +-
 .../2.0.6/properties/stack_select_packages.json | 952 +++++++++++++++++++
 .../python/stacks/2.0.6/FLUME/test_flume.py     |   5 +-
 .../stacks/2.0.6/HBASE/test_hbase_client.py     |   5 +-
 .../stacks/2.0.6/HBASE/test_hbase_master.py     |   4 +
 .../2.0.6/HBASE/test_hbase_regionserver.py      |   4 +
 .../2.0.6/HBASE/test_phoenix_queryserver.py     |   3 +
 .../python/stacks/2.0.6/HDFS/test_datanode.py   |   4 +
 .../stacks/2.0.6/HDFS/test_hdfs_client.py       |   5 +
 .../stacks/2.0.6/HDFS/test_journalnode.py       |   4 +
 .../python/stacks/2.0.6/HDFS/test_namenode.py   |   4 +
 .../python/stacks/2.0.6/HDFS/test_nfsgateway.py |   3 +
 .../stacks/2.0.6/HIVE/test_hcat_client.py       |   3 +
 .../stacks/2.0.6/HIVE/test_hive_client.py       |   6 +
 .../stacks/2.0.6/HIVE/test_webhcat_server.py    |   4 +
 .../stacks/2.0.6/OOZIE/test_oozie_client.py     |   4 +
 .../stacks/2.0.6/OOZIE/test_oozie_server.py     |  11 +
 .../python/stacks/2.0.6/PIG/test_pig_client.py  |   4 +
 .../python/stacks/2.0.6/SQOOP/test_sqoop.py     |   3 +
 .../stacks/2.0.6/YARN/test_historyserver.py     |   5 +-
 .../stacks/2.0.6/YARN/test_mapreduce2_client.py |   5 +
 .../stacks/2.0.6/YARN/test_nodemanager.py       |   6 +
 .../stacks/2.0.6/YARN/test_resourcemanager.py   |   3 +
 .../stacks/2.0.6/YARN/test_yarn_client.py       |   4 +
 .../2.0.6/ZOOKEEPER/test_zookeeper_client.py    |   4 +
 .../2.0.6/ZOOKEEPER/test_zookeeper_server.py    |   5 +
 .../hooks/after-INSTALL/test_after_install.py   |  43 +-
 .../stacks/2.1/FALCON/test_falcon_client.py     |   4 +
 .../stacks/2.1/FALCON/test_falcon_server.py     |   5 +
 .../stacks/2.1/HIVE/test_hive_metastore.py      |  26 +-
 .../stacks/2.1/STORM/test_storm_drpc_server.py  |   4 +
 .../stacks/2.1/STORM/test_storm_nimbus.py       |   3 +
 .../stacks/2.1/STORM/test_storm_nimbus_prod.py  |   3 +
 .../stacks/2.1/STORM/test_storm_supervisor.py   |   4 +
 .../2.1/STORM/test_storm_supervisor_prod.py     |   4 +
 .../stacks/2.1/STORM/test_storm_ui_server.py    |   5 +-
 .../python/stacks/2.1/TEZ/test_tez_client.py    |   4 +
 .../stacks/2.1/YARN/test_apptimelineserver.py   |   3 +
 .../stacks/2.2/ACCUMULO/test_accumulo_client.py |   4 +
 .../stacks/2.2/KAFKA/test_kafka_broker.py       |   4 +
 .../stacks/2.2/SLIDER/test_slider_client.py     |   6 +
 .../stacks/2.2/SPARK/test_job_history_server.py |   3 +
 .../stacks/2.2/SPARK/test_spark_client.py       |   4 +
 .../stacks/2.3/MAHOUT/test_mahout_client.py     |   4 +
 .../2.3/SPARK/test_spark_thrift_server.py       |   3 +
 .../src/test/python/stacks/utils/RMFTestCase.py |  12 +
 219 files changed, 2320 insertions(+), 807 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-common/src/main/python/resource_management/libraries/functions/stack_select.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/stack_select.py b/ambari-common/src/main/python/resource_management/libraries/functions/stack_select.py
index 265e7df..9e28907 100644
--- a/ambari-common/src/main/python/resource_management/libraries/functions/stack_select.py
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/stack_select.py
@@ -22,6 +22,7 @@ limitations under the License.
 import os
 import sys
 import re
+import ambari_simplejson as json
 
 # Local Imports
 from resource_management.core.logger import Logger
@@ -40,50 +41,6 @@ from resource_management.libraries.functions import StackFeature
 
 STACK_SELECT_PREFIX = 'ambari-python-wrap'
 
-# a mapping of Ambari server role to <stack-selector-tool> component name for all
-# non-clients
-SERVER_ROLE_DIRECTORY_MAP = {
-  'ACCUMULO_MASTER' : 'accumulo-master',
-  'ACCUMULO_MONITOR' : 'accumulo-monitor',
-  'ACCUMULO_GC' : 'accumulo-gc',
-  'ACCUMULO_TRACER' : 'accumulo-tracer',
-  'ACCUMULO_TSERVER' : 'accumulo-tablet',
-  'ATLAS_SERVER' : 'atlas-server',
-  'FLUME_HANDLER' : 'flume-server',
-  'FALCON_SERVER' : 'falcon-server',
-  'NAMENODE' : 'hadoop-hdfs-namenode',
-  'DATANODE' : 'hadoop-hdfs-datanode',
-  'SECONDARY_NAMENODE' : 'hadoop-hdfs-secondarynamenode',
-  'NFS_GATEWAY' : 'hadoop-hdfs-nfs3',
-  'JOURNALNODE' : 'hadoop-hdfs-journalnode',
-  'HBASE_MASTER' : 'hbase-master',
-  'HBASE_REGIONSERVER' : 'hbase-regionserver',
-  'HIVE_METASTORE' : 'hive-metastore',
-  'HIVE_SERVER' : 'hive-server2',
-  'HIVE_SERVER_INTERACTIVE' : 'hive-server2-hive2',
-  'WEBHCAT_SERVER' : 'hive-webhcat',
-  'KAFKA_BROKER' : 'kafka-broker',
-  'KNOX_GATEWAY' : 'knox-server',
-  'OOZIE_SERVER' : 'oozie-server',
-  'RANGER_ADMIN' : 'ranger-admin',
-  'RANGER_USERSYNC' : 'ranger-usersync',
-  'RANGER_TAGSYNC' : 'ranger-tagsync',
-  'RANGER_KMS' : 'ranger-kms',
-  'SPARK_JOBHISTORYSERVER' : 'spark-historyserver',
-  'SPARK_THRIFTSERVER' : 'spark-thriftserver',
-  'NIMBUS' : 'storm-nimbus',
-  'SUPERVISOR' : 'storm-supervisor',
-  'HISTORYSERVER' : 'hadoop-mapreduce-historyserver',
-  'APP_TIMELINE_SERVER' : 'hadoop-yarn-timelineserver',
-  'NODEMANAGER' : 'hadoop-yarn-nodemanager',
-  'RESOURCEMANAGER' : 'hadoop-yarn-resourcemanager',
-  'ZOOKEEPER_SERVER' : 'zookeeper-server',
-
-  # ZKFC is tied to NN since it doesn't have its own componnet in <stack-selector-tool> and there is
-  # a requirement that the ZKFC is installed on each NN
-  'ZKFC' : 'hadoop-hdfs-namenode'
-}
-
 # mapping of service check to <stack-selector-tool> component
 SERVICE_CHECK_DIRECTORY_MAP = {
   "HDFS_SERVICE_CHECK" : "hadoop-client",
@@ -113,6 +70,110 @@ HADOOP_DIR_DEFAULTS = {
   "lib": "/usr/lib/hadoop/lib"
 }
 
+PACKAGE_SCOPE_INSTALL = "INSTALL"
+PACKAGE_SCOPE_STANDARD = "STANDARD"
+PACKAGE_SCOPE_PATCH = "PATCH"
+PACKAGE_SCOPE_STACK_SELECT = "STACK-SELECT-PACKAGE"
+_PACKAGE_SCOPES = (PACKAGE_SCOPE_INSTALL, PACKAGE_SCOPE_STANDARD, PACKAGE_SCOPE_PATCH, PACKAGE_SCOPE_STACK_SELECT)
+
+
+def get_package_name(default_package = None):
+  """
+  Gets the stack-select package name for the service name and
+  component from the current command. Not all services/components are used with the
+  stack-select tools, so those will return no packages.
+
+  :return:  the stack-select package name for the command's component or None
+  """
+  config = Script.get_config()
+
+  if 'role' not in config or 'serviceName' not in config:
+    raise Fail("Both the role and the service name must be included in the command in order to determine which packages to use with the stack-select tool")
+
+  service_name = config['serviceName']
+  component_name = config['role']
+
+  # should return a single item
+  try:
+    package = get_packages(PACKAGE_SCOPE_STACK_SELECT, service_name, component_name)
+    if package is None:
+      package = default_package
+
+    return package
+  except:
+    if default_package is not None:
+      return default_package
+    else:
+      raise
+
+
+
+def get_packages(scope, service_name = None, component_name = None):
+  """
+  Gets the packages which should be used with the stack's stack-select tool for the
+  specified service/component. Not all services/components are used with the stack-select tools,
+  so those will return no packages.
+
+  :param scope: the scope of the command
+  :param service_name:  the service name, such as ZOOKEEPER
+  :param component_name: the component name, such as ZOOKEEPER_SERVER
+  :return:  the packages to use with stack-select or None
+  """
+  from resource_management.libraries.functions.default import default
+
+  import time
+
+  if scope not in _PACKAGE_SCOPES:
+    raise Fail("The specified scope of {0} is not valid".format(scope))
+
+  config = Script.get_config()
+
+  if service_name is None or component_name is None:
+    if 'role' not in config or 'serviceName' not in config:
+      raise Fail("Both the role and the service name must be included in the command in order to determine which packages to use with the stack-select tool")
+
+    service_name = config['serviceName']
+    component_name = config['role']
+
+
+  stack_name = default("/hostLevelParams/stack_name", None)
+  if stack_name is None:
+    raise Fail("The stack name is not present in the command. Packages for stack-select tool cannot be loaded.")
+
+  stack_select_packages_config = default("/configurations/cluster-env/stack_select_packages", None)
+  if stack_select_packages_config is None:
+    raise Fail("The stack packages are not defined on the command. Unable to load packages for the stack-select tool")
+
+  data = json.loads(stack_select_packages_config)
+
+  if stack_name not in data:
+    raise Fail(
+      "Cannot find stack-select packages for the {0} stack".format(stack_name))
+
+  stack_select_key = "stack-select"
+  data = data[stack_name]
+  if stack_select_key not in data:
+    raise Fail(
+      "There are no stack-select packages defined for this command for the {0} stack".format(stack_name))
+
+  # this should now be the dictionary of role name to package name
+  data = data[stack_select_key]
+  service_name = service_name.upper()
+  component_name = component_name.upper()
+
+  if service_name not in data:
+    Logger.info("Skipping stack-select on {0} because it does not exist in the stack-select package structure.".format(service_name))
+    return None
+
+  data = data[service_name]
+
+  if component_name not in data:
+    Logger.info("Skipping stack-select on {0} because it does not exist in the stack-select package structure.".format(component_name))
+    return None
+
+  return data[component_name][scope]
+
+
 def select_all(version_to_select):
   """
   Executes <stack-selector-tool> on every component for the specified version. If the value passed in is a
@@ -135,6 +196,20 @@ def select_all(version_to_select):
   Execute(command, only_if = only_if_command)
 
 
+def select_packages(version):
+  """
+  Uses the command's service and role to determine the stack-select packages which need to be invoked.
+  :param version: the version to select
+  :return: None
+  """
+  stack_select_packages = get_packages(PACKAGE_SCOPE_STANDARD)
+  if stack_select_packages is None:
+    return
+
+  for stack_select_package_name in stack_select_packages:
+    select(stack_select_package_name, version)
+
+
 def select(component, version):
   """
   Executes <stack-selector-tool> on the specific component and version. Some global
@@ -170,15 +245,15 @@ def get_role_component_current_stack_version():
   Gets the current HDP version of the component that this role command is for.
   :return:  the current HDP version of the specified component or None
   """
-  stack_select_component = None
   role = default("/role", "")
   role_command =  default("/roleCommand", "")
+
   stack_selector_name = stack_tools.get_stack_tool_name(stack_tools.STACK_SELECTOR_NAME)
 
-  if role in SERVER_ROLE_DIRECTORY_MAP:
-    stack_select_component = SERVER_ROLE_DIRECTORY_MAP[role]
-  elif role_command == "SERVICE_CHECK" and role in SERVICE_CHECK_DIRECTORY_MAP:
+  if role_command == "SERVICE_CHECK" and role in SERVICE_CHECK_DIRECTORY_MAP:
     stack_select_component = SERVICE_CHECK_DIRECTORY_MAP[role]
+  else:
+    stack_select_component = get_package_name()
 
   if stack_select_component is None:
     return None
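
For readers following the new stack_select helpers above: a minimal sketch of the
structure that get_packages() walks, using a hypothetical HIVE / HIVE_SERVER entry.
The real per-stack values ship in the stack_select_packages.json files added by this
commit and are not reproduced here.

# cluster-env/stack_select_packages arrives as a JSON string and is parsed with
# json.loads(); after parsing it is assumed to look roughly like this:
stack_select_packages = {
  "HDP": {
    "stack-select": {
      "HIVE": {
        "HIVE_SERVER": {
          "STACK-SELECT-PACKAGE": "hive-server2",  # single name used by get_package_name()
          "INSTALL": ["hive-server2"],
          "PATCH": ["hive-server2"],
          "STANDARD": ["hive-server2"]             # list iterated by select_packages()
        }
      }
    }
  }
}

# With serviceName=HIVE and role=HIVE_SERVER on the command,
# get_packages(PACKAGE_SCOPE_STANDARD) returns ["hive-server2"], and
# select_packages("2.3.0.0-1234") then runs the stack-selector tool once per
# entry, e.g. "hdp-select set hive-server2 2.3.0.0-1234".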

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-common/src/main/python/resource_management/libraries/script/script.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/script/script.py b/ambari-common/src/main/python/resource_management/libraries/script/script.py
index cce3ac4..abbada4 100644
--- a/ambari-common/src/main/python/resource_management/libraries/script/script.py
+++ b/ambari-common/src/main/python/resource_management/libraries/script/script.py
@@ -182,12 +182,6 @@ class Script(object):
     except IOError, err:
       Script.structuredOut.update({"errMsg" : "Unable to write to " + self.stroutfile})
 
-  def get_component_name(self):
-    """
-    To be overridden by subclasses.
-     Returns a string with the component name used in selecting the version.
-    """
-    pass
 
   def get_config_dir_during_stack_upgrade(self, env, base_dir, conf_select_name):
     """
@@ -217,11 +211,13 @@ class Script(object):
     :param stack_name: One of HDP, HDPWIN, PHD, BIGTOP.
     :return: Append the version number to the structured out.
     """
+    from resource_management.libraries.functions import stack_select
+
     stack_name = Script.get_stack_name()
-    component_name = self.get_component_name()
+    stack_select_package_name = stack_select.get_package_name()
 
-    if component_name and stack_name:
-      component_version = get_component_version(stack_name, component_name)
+    if stack_select_package_name and stack_name:
+      component_version = get_component_version(stack_name, stack_select_package_name)
 
       if component_version:
         self.put_structured_out({"version": component_version})
@@ -426,11 +422,12 @@ class Script(object):
 
     :return: stack version including the build number. e.g.: 2.3.4.0-1234.
     """
+    from resource_management.libraries.functions import stack_select
+
     # preferred way is to get the actual selected version of current component
-    component_name = self.get_component_name()
-    if not Script.stack_version_from_distro_select and component_name:
-      from resource_management.libraries.functions import stack_select
-      Script.stack_version_from_distro_select = stack_select.get_stack_version_before_install(component_name)
+    stack_select_package_name = stack_select.get_package_name()
+    if not Script.stack_version_from_distro_select and stack_select_package_name:
+      Script.stack_version_from_distro_select = stack_select.get_stack_version_before_install(stack_select_package_name)
 
     # If <stack-selector-tool> has not yet been done (situations like first install),
     # we can use <stack-selector-tool> version itself.
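
The component-script diffs that follow (after the server-side Java changes) all apply
the same mechanical change: the per-script get_component_name() overrides and
hard-coded component names are removed, and the upgrade hooks call
stack_select.select_packages() instead. A minimal sketch of the resulting pattern,
assuming a hypothetical MyServiceClient script and "myservice" conf-select name
(neither is part of this commit):

from resource_management.libraries.script import Script
from resource_management.libraries.functions import conf_select, stack_select
from resource_management.libraries.functions import StackFeature
from resource_management.libraries.functions.stack_features import check_stack_feature

class MyServiceClient(Script):
  # no get_component_name() override; the stack-select package is resolved from
  # the command's serviceName/role by stack_select.get_package_name()

  def pre_upgrade_restart(self, env, upgrade_type=None):
    import params  # the script's sibling params.py module
    env.set_params(params)
    if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
      conf_select.select(params.stack_name, "myservice", params.version)
      stack_select.select_packages(params.version)

if __name__ == "__main__":
  MyServiceClient().execute()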

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BlueprintConfigurationProcessor.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BlueprintConfigurationProcessor.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BlueprintConfigurationProcessor.java
index 50cea9e..ec8b38c 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BlueprintConfigurationProcessor.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BlueprintConfigurationProcessor.java
@@ -2953,14 +2953,15 @@ public class BlueprintConfigurationProcessor {
 
     Set<String> properties = Sets.newHashSet(ConfigHelper.CLUSTER_ENV_STACK_NAME_PROPERTY,
         ConfigHelper.CLUSTER_ENV_STACK_ROOT_PROPERTY, ConfigHelper.CLUSTER_ENV_STACK_TOOLS_PROPERTY,
-        ConfigHelper.CLUSTER_ENV_STACK_FEATURES_PROPERTY);
+        ConfigHelper.CLUSTER_ENV_STACK_FEATURES_PROPERTY,
+        ConfigHelper.CLUSTER_ENV_STACK_SELECT_PACKAGES_PROPERTY);
 
     try {
       Map<String, Map<String, String>> defaultStackProperties = configHelper.getDefaultStackProperties(stackId);
       Map<String,String> clusterEnvDefaultProperties = defaultStackProperties.get(CLUSTER_ENV_CONFIG_TYPE_NAME);
 
       for( String property : properties ){
-        if (defaultStackProperties.containsKey(property)) {
+        if (clusterEnvDefaultProperties.containsKey(property)) {
           configuration.setProperty(CLUSTER_ENV_CONFIG_TYPE_NAME, property,
               clusterEnvDefaultProperties.get(property));
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigHelper.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigHelper.java b/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigHelper.java
index 974ad4f..8270370 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigHelper.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigHelper.java
@@ -92,6 +92,7 @@ public class ConfigHelper {
   public static final String CLUSTER_ENV_STACK_FEATURES_PROPERTY = "stack_features";
   public static final String CLUSTER_ENV_STACK_TOOLS_PROPERTY = "stack_tools";
   public static final String CLUSTER_ENV_STACK_ROOT_PROPERTY = "stack_root";
+  public static final String CLUSTER_ENV_STACK_SELECT_PACKAGES_PROPERTY = "stack_select_packages";
 
   public static final String HTTP_ONLY = "HTTP_ONLY";
   public static final String HTTPS_ONLY = "HTTPS_ONLY";

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/java/org/apache/ambari/server/upgrade/FinalUpgradeCatalog.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/FinalUpgradeCatalog.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/FinalUpgradeCatalog.java
index dad0ecf..55f9665 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/FinalUpgradeCatalog.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/FinalUpgradeCatalog.java
@@ -63,6 +63,7 @@ public class FinalUpgradeCatalog extends AbstractFinalUpgradeCatalog {
    * <ul>
    * <li>Adds/Updates {@link ConfigHelper#CLUSTER_ENV_STACK_FEATURES_PROPERTY} from stack</li>
    * <li>Adds/Updates {@link ConfigHelper#CLUSTER_ENV_STACK_TOOLS_PROPERTY} from stack</li>
+   * <li>Adds/Updates {@link ConfigHelper#CLUSTER_ENV_STACK_SELECT_PACKAGES_PROPERTY} from stack</li>
    * </ul>
    *
    * Note: Config properties stack_features and stack_tools should always be updated to latest values as defined
@@ -84,7 +85,8 @@ public class FinalUpgradeCatalog extends AbstractFinalUpgradeCatalog {
       List<PropertyInfo> properties = stackInfo.getProperties();
       for(PropertyInfo property : properties) {
         if(property.getName().equals(ConfigHelper.CLUSTER_ENV_STACK_FEATURES_PROPERTY) ||
-            property.getName().equals(ConfigHelper.CLUSTER_ENV_STACK_TOOLS_PROPERTY)) {
+            property.getName().equals(ConfigHelper.CLUSTER_ENV_STACK_TOOLS_PROPERTY) || 
+            property.getName().equals(ConfigHelper.CLUSTER_ENV_STACK_SELECT_PACKAGES_PROPERTY)) {
           propertyMap.put(property.getName(), property.getValue());
         }
       }

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/accumulo_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/accumulo_client.py b/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/accumulo_client.py
index 67ca525..856446c 100644
--- a/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/accumulo_client.py
+++ b/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/accumulo_client.py
@@ -30,9 +30,6 @@ from accumulo_configuration import setup_conf_dir
 
 
 class AccumuloClient(Script):
-  def get_component_name(self):
-    return "accumulo-client"
-
   def install(self, env):
     self.install_packages(env)
     self.configure(env)
@@ -60,7 +57,7 @@ class AccumuloClient(Script):
 
     Logger.info("Executing Accumulo Client Upgrade pre-restart")
     conf_select.select(params.stack_name, "accumulo", params.version)
-    stack_select.select("accumulo-client", params.version)
+    stack_select.select_packages(params.version)
 
 if __name__ == "__main__":
   AccumuloClient().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/accumulo_script.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/accumulo_script.py b/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/accumulo_script.py
index ebd418d..a0519b2 100644
--- a/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/accumulo_script.py
+++ b/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/accumulo_script.py
@@ -37,33 +37,10 @@ from accumulo_service import accumulo_service
 
 class AccumuloScript(Script):
 
-  # a mapping between the component named used by these scripts and the name
-  # which is used by <stack-selector-tool>
-  COMPONENT_TO_STACK_SELECT_MAPPING = {
-    "gc" : "accumulo-gc",
-    "master" : "accumulo-master",
-    "monitor" : "accumulo-monitor",
-    "tserver" : "accumulo-tablet",
-    "tracer" : "accumulo-tracer"
-  }
-
   def __init__(self, component):
     self.component = component
 
 
-  def get_component_name(self):
-    """
-    Gets the <stack-selector-tool> component name given the script component
-    :return:  the name of the component on the stack which is used by
-              <stack-selector-tool>
-    """
-    if self.component not in self.COMPONENT_TO_STACK_SELECT_MAPPING:
-      return None
-
-    stack_component = self.COMPONENT_TO_STACK_SELECT_MAPPING[self.component]
-    return stack_component
-
-
   def install(self, env):
     self.install_packages(env)
 
@@ -107,19 +84,12 @@ class AccumuloScript(Script):
     if not (params.stack_version_formatted and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.stack_version_formatted)):
       return
 
-    if self.component not in self.COMPONENT_TO_STACK_SELECT_MAPPING:
-      Logger.info("Unable to execute an upgrade for unknown component {0}".format(self.component))
-      raise Fail("Unable to execute an upgrade for unknown component {0}".format(self.component))
-
-    stack_component = self.COMPONENT_TO_STACK_SELECT_MAPPING[self.component]
+    stack_component = stack_select.get_package_name()
 
     Logger.info("Executing Accumulo Upgrade pre-restart for {0}".format(stack_component))
     conf_select.select(params.stack_name, "accumulo", params.version)
-    stack_select.select(stack_component, params.version)
+    stack_select.select_packages(params.version)
 
-    # some accumulo components depend on the client, so update that too
-    stack_select.select("accumulo-client", params.version)
-      
   def get_log_folder(self):
     import params
     return params.log_dir

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/atlas_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/atlas_client.py b/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/atlas_client.py
index d01ff84..6e4a168 100644
--- a/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/atlas_client.py
+++ b/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/atlas_client.py
@@ -30,16 +30,13 @@ from metadata import metadata
 
 class AtlasClient(Script):
 
-  def get_component_name(self):
-    return "atlas-client"
-
   def pre_upgrade_restart(self, env, upgrade_type=None):
     import params
     env.set_params(params)
 
     if check_stack_feature(StackFeature.ATLAS_UPGRADE_SUPPORT, params.version_for_stack_feature_checks):
       conf_select.select(params.stack_name, "atlas", params.version)
-      stack_select.select("atlas-client", params.version)
+      stack_select.select_packages(params.version)
 
   def install(self, env):
     self.install_packages(env)

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/metadata_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/metadata_server.py b/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/metadata_server.py
index 38f9a41..ff7bf5f 100644
--- a/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/metadata_server.py
+++ b/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/metadata_server.py
@@ -38,10 +38,6 @@ from setup_ranger_atlas import setup_ranger_atlas
 from resource_management.core.resources.zkmigrator import ZkMigrator
 
 class MetadataServer(Script):
-
-  def get_component_name(self):
-    return "atlas-server"
-
   def install(self, env):
     import params
     env.set_params(params)
@@ -63,7 +59,7 @@ class MetadataServer(Script):
 
     if check_stack_feature(StackFeature.ATLAS_UPGRADE_SUPPORT, params.version):
       conf_select.select(params.stack_name, "atlas", params.version)
-      stack_select.select("atlas-server", params.version)
+      stack_select.select_packages(params.version)
 
   def start(self, env, upgrade_type=None):
     import params

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/DRUID/0.9.2/package/scripts/druid_node.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/DRUID/0.9.2/package/scripts/druid_node.py b/ambari-server/src/main/resources/common-services/DRUID/0.9.2/package/scripts/druid_node.py
index 7c6bf39..20623f7 100644
--- a/ambari-server/src/main/resources/common-services/DRUID/0.9.2/package/scripts/druid_node.py
+++ b/ambari-server/src/main/resources/common-services/DRUID/0.9.2/package/scripts/druid_node.py
@@ -35,10 +35,6 @@ class DruidBase(Script):
   def __init__(self, nodeType=None):
     self.nodeType = nodeType
 
-  def get_component_name(self):
-    node_type_lower = self.nodeType.lower()
-    return format("druid-{node_type_lower}")
-
   def install(self, env):
     self.install_packages(env)
 
@@ -55,7 +51,7 @@ class DruidBase(Script):
     env.set_params(params)
 
     if params.stack_version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.stack_version):
-      stack_select.select(self.get_component_name(), params.stack_version)
+      stack_select.select_packages(params.stack_version)
     if params.stack_version and check_stack_feature(StackFeature.CONFIG_VERSIONING, params.stack_version):
       conf_select.select(params.stack_name, "druid", params.stack_version)
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/DRUID/0.9.2/package/scripts/superset.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/DRUID/0.9.2/package/scripts/superset.py b/ambari-server/src/main/resources/common-services/DRUID/0.9.2/package/scripts/superset.py
index b837b24..36dab51 100644
--- a/ambari-server/src/main/resources/common-services/DRUID/0.9.2/package/scripts/superset.py
+++ b/ambari-server/src/main/resources/common-services/DRUID/0.9.2/package/scripts/superset.py
@@ -36,9 +36,6 @@ from resource_management.libraries.resources.properties_file import PropertiesFi
 
 class Superset(Script):
 
-  def get_component_name(self):
-    return format("druid-superset")
-
   def install(self, env):
     self.install_packages(env)
 
@@ -98,7 +95,7 @@ class Superset(Script):
     env.set_params(params)
 
     if params.stack_version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.stack_version):
-      stack_select.select(self.get_component_name(), params.stack_version)
+      stack_select.select_packages(params.version)
     if params.stack_version and check_stack_feature(StackFeature.CONFIG_VERSIONING, params.stack_version):
       conf_select.select(params.stack_name, "superset", params.stack_version)
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon_client.py b/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon_client.py
index 365f661..85d80ba 100644
--- a/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon_client.py
+++ b/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon_client.py
@@ -38,9 +38,6 @@ class FalconClient(Script):
 
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class FalconClientLinux(FalconClient):
-  def get_component_name(self):
-    return "falcon-client"
-
   def install(self, env):
     self.install_packages(env)
     self.configure(env)
@@ -57,7 +54,7 @@ class FalconClientLinux(FalconClient):
 
     Logger.info("Executing Falcon Client Stack Upgrade pre-restart")
     conf_select.select(params.stack_name, "falcon", params.version)
-    stack_select.select("falcon-client", params.version)
+    stack_select.select_packages(params.version)
 
 @OsFamilyImpl(os_family=OSConst.WINSRV_FAMILY)
 class FalconClientWindows(FalconClient):

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon_server.py b/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon_server.py
index 5b2db44..c7ba556 100644
--- a/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon_server.py
+++ b/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon_server.py
@@ -60,9 +60,6 @@ class FalconServer(Script):
 
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class FalconServerLinux(FalconServer):
-  def get_component_name(self):
-    return "falcon-server"
-
   def install(self, env):
     import params
     self.install_packages(env)
@@ -85,7 +82,7 @@ class FalconServerLinux(FalconServer):
 
     Logger.info("Executing Falcon Server Stack Upgrade pre-restart")
     conf_select.select(params.stack_name, "falcon", params.version)
-    stack_select.select("falcon-server", params.version)
+    stack_select.select_packages(params.version)
 
     falcon_server_upgrade.pre_start_restore()
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/package/scripts/flume_handler.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/package/scripts/flume_handler.py b/ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/package/scripts/flume_handler.py
index 107ce6d..f57fae7 100644
--- a/ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/package/scripts/flume_handler.py
+++ b/ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/package/scripts/flume_handler.py
@@ -40,9 +40,6 @@ class FlumeHandler(Script):
 
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class FlumeHandlerLinux(FlumeHandler):
-  def get_component_name(self):
-    return "flume-server"
-
   def install(self, env):
     import params
     self.install_packages(env)
@@ -90,7 +87,7 @@ class FlumeHandlerLinux(FlumeHandler):
 
     Logger.info("Executing Flume Stack Upgrade pre-restart")
     conf_select.select(params.stack_name, "flume", params.version)
-    stack_select.select("flume-server", params.version)
+    stack_select.select_packages(params.version)
 
   def get_log_folder(self):
     import params

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase_client.py b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase_client.py
index f18a96a..4820d24 100644
--- a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase_client.py
+++ b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase_client.py
@@ -52,20 +52,16 @@ class HbaseClientWindows(HbaseClient):
 
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class HbaseClientDefault(HbaseClient):
-  def get_component_name(self):
-    return "hbase-client"
-
   def pre_upgrade_restart(self, env, upgrade_type=None):
     import params
     env.set_params(params)
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version): 
       conf_select.select(params.stack_name, "hbase", params.version)
-      stack_select.select("hbase-client", params.version)
 
       # phoenix may not always be deployed
       try:
-        stack_select.select("phoenix-client", params.version)
+        stack_select.select_packages(params.version)
       except Exception as e:
         print "Ignoring error due to missing phoenix-client"
         print str(e)
@@ -75,7 +71,6 @@ class HbaseClientDefault(HbaseClient):
       # of the final "CLIENTS" group and we need to ensure that hadoop-client
       # is also set
       conf_select.select(params.stack_name, "hadoop", params.version)
-      stack_select.select("hadoop-client", params.version)
 
 
 if __name__ == "__main__":

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase_master.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase_master.py b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase_master.py
index 8151572..5932c65 100644
--- a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase_master.py
+++ b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase_master.py
@@ -76,13 +76,10 @@ class HbaseMasterWindows(HbaseMaster):
 
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class HbaseMasterDefault(HbaseMaster):
-  def get_component_name(self):
-    return "hbase-master"
-
   def pre_upgrade_restart(self, env, upgrade_type=None):
     import params
     env.set_params(params)
-    upgrade.prestart(env, "hbase-master")
+    upgrade.prestart(env)
 
   def start(self, env, upgrade_type=None):
     import params

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase_regionserver.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase_regionserver.py b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase_regionserver.py
index 9194991..10dde75 100644
--- a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase_regionserver.py
+++ b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase_regionserver.py
@@ -70,13 +70,10 @@ class HbaseRegionServerWindows(HbaseRegionServer):
 
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class HbaseRegionServerDefault(HbaseRegionServer):
-  def get_component_name(self):
-    return "hbase-regionserver"
-
   def pre_upgrade_restart(self, env, upgrade_type=None):
     import params
     env.set_params(params)
-    upgrade.prestart(env, "hbase-regionserver")
+    upgrade.prestart(env)
 
   def post_upgrade_restart(self, env, upgrade_type=None):
     import params

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/phoenix_queryserver.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/phoenix_queryserver.py b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/phoenix_queryserver.py
index b1bdb78..f2b44bd 100644
--- a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/phoenix_queryserver.py
+++ b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/phoenix_queryserver.py
@@ -34,10 +34,6 @@ class PhoenixQueryServer(Script):
     self.install_packages(env)
 
 
-  def get_component_name(self):
-    return "phoenix-server"
-
-
   def configure(self, env):
     import params
     env.set_params(params)
@@ -64,7 +60,7 @@ class PhoenixQueryServer(Script):
     if params.stack_version_formatted and check_stack_feature(StackFeature.PHOENIX, params.stack_version_formatted):     
       # phoenix uses hbase configs
       conf_select.select(params.stack_name, "hbase", params.version)
-      stack_select.select("phoenix-server", params.version)
+      stack_select.select_packages(params.version)
 
 
   def status(self, env):

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/upgrade.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/upgrade.py b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/upgrade.py
index 43c7ff4..8ea3616 100644
--- a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/upgrade.py
+++ b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/upgrade.py
@@ -33,12 +33,12 @@ from resource_management.libraries.functions.format import format
 from resource_management.libraries.functions import check_process_status
 
 
-def prestart(env, stack_component):
+def prestart(env):
   import params
 
   if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
     conf_select.select(params.stack_name, "hbase", params.version)
-    stack_select.select(stack_component, params.version)
+    stack_select.select_packages(params.version)
 
 def post_regionserver(env):
   import params

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/datanode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/datanode.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/datanode.py
index c7b813f..ba926cb 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/datanode.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/datanode.py
@@ -38,14 +38,11 @@ from utils import get_dfsadmin_base_command
 
 class DataNode(Script):
 
-  def get_component_name(self):
-    return "hadoop-hdfs-datanode"
-
   def get_hdfs_binary(self):
     """
     Get the name or path to the hdfs binary depending on the component name.
     """
-    component_name = self.get_component_name()
+    component_name = stack_select.get_package_name()
     return get_hdfs_binary(component_name)
 
 
@@ -133,7 +130,7 @@ class DataNodeDefault(DataNode):
     env.set_params(params)
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
       conf_select.select(params.stack_name, "hadoop", params.version)
-      stack_select.select("hadoop-hdfs-datanode", params.version)
+      stack_select.select_packages(params.version)
 
   def post_upgrade_restart(self, env, upgrade_type=None):
     Logger.info("Executing DataNode Stack Upgrade post-restart")

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_client.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_client.py
index 87a6f52..f320eee 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_client.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_client.py
@@ -57,15 +57,12 @@ class HdfsClient(Script):
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class HdfsClientDefault(HdfsClient):
 
-  def get_component_name(self):
-    return "hadoop-client"
-
   def pre_upgrade_restart(self, env, upgrade_type=None):
     import params
     env.set_params(params)
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
       conf_select.select(params.stack_name, "hadoop", params.version)
-      stack_select.select("hadoop-client", params.version)
+      stack_select.select_packages(params.version)
 
 @OsFamilyImpl(os_family=OSConst.WINSRV_FAMILY)
 class HdfsClientWindows(HdfsClient):

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/journalnode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/journalnode.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/journalnode.py
index 0805ff4..402e7ad 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/journalnode.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/journalnode.py
@@ -42,9 +42,6 @@ class JournalNode(Script):
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class JournalNodeDefault(JournalNode):
 
-  def get_component_name(self):
-    return "hadoop-hdfs-journalnode"
-
   def pre_upgrade_restart(self, env, upgrade_type=None):
     Logger.info("Executing Stack Upgrade pre-restart")
     import params
@@ -52,7 +49,7 @@ class JournalNodeDefault(JournalNode):
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
       conf_select.select(params.stack_name, "hadoop", params.version)
-      stack_select.select("hadoop-hdfs-journalnode", params.version)
+      stack_select.select_packages(params.version)
 
   def start(self, env, upgrade_type=None):
     import params

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/namenode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/namenode.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/namenode.py
index 7f7e30c..aac7977 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/namenode.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/namenode.py
@@ -67,14 +67,11 @@ except ImportError:
 
 class NameNode(Script):
 
-  def get_component_name(self):
-    return "hadoop-hdfs-namenode"
-
   def get_hdfs_binary(self):
     """
     Get the name or path to the hdfs binary depending on the component name.
     """
-    component_name = self.get_component_name()
+    component_name = stack_select.get_package_name()
     return get_hdfs_binary(component_name)
 
   def install(self, env):
@@ -204,7 +201,7 @@ class NameNodeDefault(NameNode):
     if upgrade_type != constants.UPGRADE_TYPE_NON_ROLLING or params.upgrade_direction != Direction.DOWNGRADE:
       conf_select.select(params.stack_name, "hadoop", params.version)
 
-    stack_select.select("hadoop-hdfs-namenode", params.version)
+    stack_select.select_packages(params.version)
 
   def post_upgrade_restart(self, env, upgrade_type=None):
     Logger.info("Executing Stack Upgrade post-restart")

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/nfsgateway.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/nfsgateway.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/nfsgateway.py
index 03a497a..dea9d43 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/nfsgateway.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/nfsgateway.py
@@ -31,10 +31,6 @@ from resource_management.libraries.functions.stack_features import check_stack_f
 
 
 class NFSGateway(Script):
-
-  def get_component_name(self):
-    return "hadoop-hdfs-nfs3"
-
   def install(self, env):
     import params
 
@@ -48,7 +44,7 @@ class NFSGateway(Script):
 
     if params.stack_version_formatted and check_stack_feature(StackFeature.NFS, params.stack_version_formatted):
       conf_select.select(params.stack_name, "hadoop", params.version)
-      stack_select.select("hadoop-hdfs-nfs3", params.version)
+      stack_select.select_packages(params.version)
 
   def start(self, env, upgrade_type=None):
     import params

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/snamenode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/snamenode.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/snamenode.py
index ac45ffd..115a830 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/snamenode.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/snamenode.py
@@ -64,9 +64,6 @@ class SNameNode(Script):
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class SNameNodeDefault(SNameNode):
 
-  def get_component_name(self):
-    return "hadoop-hdfs-secondarynamenode"
-
   def pre_upgrade_restart(self, env, upgrade_type=None):
     Logger.info("Executing Stack Upgrade pre-restart")
     import params
@@ -74,7 +71,7 @@ class SNameNodeDefault(SNameNode):
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
       conf_select.select(params.stack_name, "hadoop", params.version)
-      stack_select.select("hadoop-hdfs-secondarynamenode", params.version)
+      stack_select.select_packages(params.version)
       
   def get_log_folder(self):
     import params

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/zkfc_slave.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/zkfc_slave.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/zkfc_slave.py
index db68544..fabbabb 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/zkfc_slave.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/zkfc_slave.py
@@ -35,13 +35,6 @@ from resource_management.libraries.script import Script
 from resource_management.core.resources.zkmigrator import ZkMigrator
 
 class ZkfcSlave(Script):
-  def get_component_name(self):
-    import params
-    if params.version_for_stack_feature_checks and check_stack_feature(StackFeature.ZKFC_VERSION_ADVERTISED, params.version_for_stack_feature_checks):
-      # params.version is not defined when installing cluster from blueprint
-      return "hadoop-hdfs-zkfc"
-    pass
-
   def install(self, env):
     import params
     env.set_params(params)
@@ -137,7 +130,7 @@ class ZkfcSlaveDefault(ZkfcSlave):
     env.set_params(params)
     if check_stack_feature(StackFeature.ZKFC_VERSION_ADVERTISED, params.version_for_stack_feature_checks):
       conf_select.select(params.stack_name, "hadoop", params.version)
-      stack_select.select("hadoop-hdfs-zkfc", params.version)
+      stack_select.select_packages(params.version)
 
 def initialize_ha_zookeeper(params):
   try:

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hcat_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hcat_client.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hcat_client.py
index b37698e..677479f 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hcat_client.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hcat_client.py
@@ -51,12 +51,6 @@ class HCatClientWindows(HCatClient):
 
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class HCatClientDefault(HCatClient):
-  def get_component_name(self):
-    # HCat client doesn't have a first-class entry in <stack-selector-tool>. Since clients always
-    # update after daemons, this ensures that the hcat directories are correct on hosts
-    # which do not include the WebHCat daemon
-    return "hive-webhcat"
-
 
   def pre_upgrade_restart(self, env, upgrade_type=None):
     """
@@ -78,7 +72,7 @@ class HCatClientDefault(HCatClient):
     # HCat client doesn't have a first-class entry in <stack-selector-tool>. Since clients always
     # update after daemons, this ensures that the hcat directories are correct on hosts
     # which do not include the WebHCat daemon
-    stack_select.select("hive-webhcat", params.version)
+    stack_select.select_packages(params.version)
 
 
 if __name__ == "__main__":

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_client.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_client.py
index 3d9bfd7..1694816 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_client.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_client.py
@@ -50,9 +50,6 @@ class HiveClientWindows(HiveClient):
 
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class HiveClientDefault(HiveClient):
-  def get_component_name(self):
-    return "hadoop-client"
-
   def pre_upgrade_restart(self, env, upgrade_type=None):
     Logger.info("Executing Hive client Stack Upgrade pre-restart")
 
@@ -61,7 +58,7 @@ class HiveClientDefault(HiveClient):
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
       conf_select.select(params.stack_name, "hive", params.version)
       conf_select.select(params.stack_name, "hadoop", params.version)
-      stack_select.select("hadoop-client", params.version)
+      stack_select.select_packages(params.version)
 
 
 if __name__ == "__main__":

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_metastore.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_metastore.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_metastore.py
index 633e82e..c84f47e 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_metastore.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_metastore.py
@@ -87,10 +87,6 @@ class HiveMetastoreWindows(HiveMetastore):
 
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class HiveMetastoreDefault(HiveMetastore):
-  def get_component_name(self):
-    return "hive-metastore"
-
-
   def status(self, env):
     import status_params
     from resource_management.libraries.functions import check_process_status
@@ -111,7 +107,7 @@ class HiveMetastoreDefault(HiveMetastore):
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
       conf_select.select(params.stack_name, "hive", params.version)
-      stack_select.select("hive-metastore", params.version)
+      stack_select.select_packages(params.version)
 
     if is_upgrade and params.stack_version_formatted_major and \
             check_stack_feature(StackFeature.HIVE_METASTORE_UPGRADE_SCHEMA, params.stack_version_formatted_major):

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py
index 8f7d068..a1212e0 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py
@@ -72,9 +72,6 @@ class HiveServerWindows(HiveServer):
 
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class HiveServerDefault(HiveServer):
-  def get_component_name(self):
-    return "hive-server2"
-
   def start(self, env, upgrade_type=None):
     import params
     env.set_params(params)
@@ -118,7 +115,7 @@ class HiveServerDefault(HiveServer):
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
       conf_select.select(params.stack_name, "hive", params.version)
-      stack_select.select("hive-server2", params.version)
+      stack_select.select_packages(params.version)
 
       # Copy mapreduce.tar.gz and tez.tar.gz to HDFS
       resource_created = copy_to_hdfs(

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_interactive.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_interactive.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_interactive.py
index c9582a3..16d7907 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_interactive.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_interactive.py
@@ -67,10 +67,6 @@ class HiveServerInteractive(Script):
 
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class HiveServerInteractiveDefault(HiveServerInteractive):
-
-    def get_component_name(self):
-      return "hive-server2-hive2"
-
     def install(self, env):
       import params
       self.install_packages(env)
@@ -86,7 +82,7 @@ class HiveServerInteractiveDefault(HiveServerInteractive):
       env.set_params(params)
 
       if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
-        stack_select.select("hive-server2-hive2", params.version)
+        stack_select.select_packages(params.version)
         conf_select.select(params.stack_name, "hive2", params.version)
 
         # Copy hive.tar.gz and tez.tar.gz used by Hive Interactive to HDFS

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_server.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_server.py
index da5e82b..db86d9e 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_server.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_server.py
@@ -64,9 +64,6 @@ class WebHCatServerWindows(WebHCatServer):
 
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class WebHCatServerDefault(WebHCatServer):
-  def get_component_name(self):
-    return "hive-webhcat"
-
   def status(self, env):
     import status_params
     env.set_params(status_params)
@@ -81,7 +78,7 @@ class WebHCatServerDefault(WebHCatServer):
       # webhcat has no conf, but uses hadoop home, so verify that regular hadoop conf is set
       conf_select.select(params.stack_name, "hive-hcatalog", params.version)
       conf_select.select(params.stack_name, "hadoop", params.version)
-      stack_select.select("hive-webhcat", params.version)
+      stack_select.select_packages(params.version)
 
   def get_log_folder(self):
     import params

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/scripts/kafka_broker.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/scripts/kafka_broker.py b/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/scripts/kafka_broker.py
index 4512038..08c3dd1 100644
--- a/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/scripts/kafka_broker.py
+++ b/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/scripts/kafka_broker.py
@@ -36,9 +36,6 @@ from setup_ranger_kafka import setup_ranger_kafka
 
 class KafkaBroker(Script):
 
-  def get_component_name(self):
-    return "kafka-broker"
-
   def install(self, env):
     self.install_packages(env)
 
@@ -52,7 +49,7 @@ class KafkaBroker(Script):
     env.set_params(params)
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
-      stack_select.select("kafka-broker", params.version)
+      stack_select.select_packages(params.version)
 
     if params.version and check_stack_feature(StackFeature.CONFIG_VERSIONING, params.version):
       conf_select.select(params.stack_name, "kafka", params.version)

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/knox_gateway.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/knox_gateway.py b/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/knox_gateway.py
index c91b394..6df68cc 100644
--- a/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/knox_gateway.py
+++ b/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/knox_gateway.py
@@ -49,9 +49,6 @@ from setup_ranger_knox import setup_ranger_knox
 
 
 class KnoxGateway(Script):
-  def get_component_name(self):
-    return "knox-server"
-
   def install(self, env):
     import params
     env.set_params(params)
@@ -122,7 +119,7 @@ class KnoxGatewayDefault(KnoxGateway):
 
     # <conf-selector-tool> will change the symlink to the conf folder.
     conf_select.select(params.stack_name, "knox", params.version)
-    stack_select.select("knox-server", params.version)
+    stack_select.select_packages(params.version)
 
     # seed the new Knox data directory with the keystores of yesteryear
     if params.upgrade_direction == Direction.UPGRADE:

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/mahout_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/mahout_client.py b/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/mahout_client.py
index 3486add..b598d17 100644
--- a/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/mahout_client.py
+++ b/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/mahout_client.py
@@ -29,18 +29,13 @@ from resource_management.libraries.functions.default import default
 
 class MahoutClient(Script):
 
-  def get_component_name(self):
-    return "mahout-client"
-
-
   def pre_upgrade_restart(self, env, upgrade_type=None):
     Logger.info("Executing Stack Upgrade pre-restart")
     import params
     env.set_params(params)
 
     conf_select.select(params.stack_name, "mahout", params.version)
-    stack_select.select("mahout-client", params.version )
-
+    stack_select.select_packages(params.version)
 
   def install(self, env):
     self.install_packages(env)

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_client.py b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_client.py
index 9bbca9f..ac8b078 100644
--- a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_client.py
+++ b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_client.py
@@ -30,9 +30,6 @@ from resource_management.core.exceptions import ClientComponentHasNoStatus
 
 class OozieClient(Script):
 
-  def get_component_name(self):
-    return "oozie-client"
-
   def install(self, env):
     self.install_packages(env)
     self.configure(env)
@@ -59,7 +56,7 @@ class OozieClient(Script):
 
     Logger.info("Executing Oozie Client Stack Upgrade pre-restart")
     conf_select.select(params.stack_name, "oozie", params.version)
-    stack_select.select("oozie-client", params.version)
+    stack_select.select_packages(params.version)
 
   # We substitute some configs (oozie.authentication.kerberos.principal) before generation (see oozie.py and params.py).
   # This function returns changed configs (it's used for config generation before config download)

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_server.py b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_server.py
index b3a8643..82a764d 100644
--- a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_server.py
+++ b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_server.py
@@ -46,9 +46,6 @@ from resource_management.core.resources.zkmigrator import ZkMigrator
 
 class OozieServer(Script):
 
-  def get_component_name(self):
-    return "oozie-server"
-
   def install(self, env):
     self.install_packages(env)
 
@@ -67,9 +64,8 @@ class OozieServer(Script):
         # This is required as both need to be pointing to new installed oozie version.
 
         # Sets the symlink : eg: <stack-root>/current/oozie-client -> <stack-root>/a.b.c.d-<version>/oozie
-        stack_select.select("oozie-client", params.version)
         # Sets the symlink : eg: <stack-root>/current/oozie-server -> <stack-root>/a.b.c.d-<version>/oozie
-        stack_select.select("oozie-server", params.version)
+        stack_select.select_packages(params.version)
 
       if params.version and check_stack_feature(StackFeature.CONFIG_VERSIONING, params.version):
         conf_select.select(params.stack_name, "oozie", params.version)
@@ -127,7 +123,7 @@ class OozieServerDefault(OozieServer):
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
       conf_select.select(params.stack_name, "oozie", params.version)
-      stack_select.select("oozie-server", params.version)
+      stack_select.select_packages(params.version)
 
     OozieUpgrade.prepare_libext_directory(upgrade_type=upgrade_type)
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/pig_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/pig_client.py b/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/pig_client.py
index 3233381..693af5c 100644
--- a/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/pig_client.py
+++ b/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/pig_client.py
@@ -43,9 +43,6 @@ class PigClient(Script):
 
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class PigClientLinux(PigClient):
-  def get_component_name(self):
-    return "hadoop-client"
-
   def pre_upgrade_restart(self, env, upgrade_type=None):
     import params
     env.set_params(params)
@@ -53,7 +50,7 @@ class PigClientLinux(PigClient):
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version): 
       conf_select.select(params.stack_name, "pig", params.version)
       conf_select.select(params.stack_name, "hadoop", params.version)
-      stack_select.select("hadoop-client", params.version) # includes pig-client
+      stack_select.select_packages(params.version)
 
   def install(self, env):
     self.install_packages(env)

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/R4ML/0.8.0/package/scripts/r4ml_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/R4ML/0.8.0/package/scripts/r4ml_client.py b/ambari-server/src/main/resources/common-services/R4ML/0.8.0/package/scripts/r4ml_client.py
index 3dbce5c..f973e69 100755
--- a/ambari-server/src/main/resources/common-services/R4ML/0.8.0/package/scripts/r4ml_client.py
+++ b/ambari-server/src/main/resources/common-services/R4ML/0.8.0/package/scripts/r4ml_client.py
@@ -34,9 +34,6 @@ class R4MLClient(Script):
     import params
     env.set_params(params)
 
-  def get_component_name(self):
-    return "r4ml-client"
-
   def pre_upgrade_restart(self, env, upgrade_type=None):
     import params
 
@@ -44,7 +41,7 @@ class R4MLClient(Script):
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
       Logger.info("Executing R4ML Client Stack Upgrade pre-restart")
       conf_select.select(params.stack_name, "r4ml", params.version)
-      stack_select.select("r4ml-client", params.version)
+      stack_select.select_packages(params.version)
 
   def stack_upgrade_save_new_config(self, env):
     import params

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/ranger_admin.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/ranger_admin.py b/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/ranger_admin.py
index ecbacbf..e27a03e 100644
--- a/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/ranger_admin.py
+++ b/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/ranger_admin.py
@@ -38,9 +38,6 @@ import os, errno
 
 class RangerAdmin(Script):
 
-  def get_component_name(self):
-    return "ranger-admin"
-
   def install(self, env):
     self.install_packages(env)
     import params
@@ -210,7 +207,7 @@ class RangerAdmin(Script):
     stack_name = upgrade_stack[0]
     stack_version = upgrade_stack[1]
 
-    stack_select.select("ranger-admin", stack_version)
+    stack_select.select_packages(params.version)
     conf_select.select(stack_name, "ranger-admin", stack_version)
 
   def get_log_folder(self):

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/ranger_tagsync.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/ranger_tagsync.py b/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/ranger_tagsync.py
index a474e76..5ff498f 100644
--- a/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/ranger_tagsync.py
+++ b/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/ranger_tagsync.py
@@ -86,10 +86,7 @@ class RangerTagsync(Script):
     if params.stack_supports_ranger_tagsync:
       Logger.info("Executing Ranger Tagsync Stack Upgrade pre-restart")
       conf_select.select(params.stack_name, "ranger-tagsync", params.version)
-      stack_select.select("ranger-tagsync", params.version)
-
-  def get_component_name(self):
-    return "ranger-tagsync"
+      stack_select.select_packages(params.version)
 
   def get_log_folder(self):
     import params
@@ -111,7 +108,7 @@ class RangerTagsync(Script):
     stack_name = upgrade_stack[0]
     stack_version = upgrade_stack[1]
 
-    stack_select.select("ranger-tagsync", stack_version)
+    stack_select.select_packages(params.version)
     conf_select.select(stack_name, "ranger-tagsync", stack_version)
     if params.stack_supports_ranger_tagsync_ssl_xml_support:
       Logger.info("Upgrading Tagsync, stack support Atlas user for Tagsync, creating keystore for same.")

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/ranger_usersync.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/ranger_usersync.py b/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/ranger_usersync.py
index b9366f6..8654bc2 100644
--- a/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/ranger_usersync.py
+++ b/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/ranger_usersync.py
@@ -109,9 +109,6 @@ class RangerUsersync(Script):
     env.set_params(params)
     upgrade.prestart(env, "ranger-usersync")
 
-  def get_component_name(self):
-    return "ranger-usersync"
-
   def get_log_folder(self):
     import params
     return params.usersync_log_dir

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/upgrade.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/upgrade.py b/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/upgrade.py
index a07a1fd..ca1b2bf 100644
--- a/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/upgrade.py
+++ b/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/upgrade.py
@@ -28,4 +28,4 @@ def prestart(env, stack_component):
 
   if params.version and params.stack_supports_rolling_upgrade:
     conf_select.select(params.stack_name, stack_component, params.version)
-    stack_select.select(stack_component, params.version)
+    stack_select.select_packages(params.version)

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/kms_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/kms_server.py b/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/kms_server.py
index 829a998..0bd11f3 100755
--- a/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/kms_server.py
+++ b/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/kms_server.py
@@ -33,9 +33,6 @@ import upgrade
 
 class KmsServer(Script):
 
-  def get_component_name(self):
-    return "ranger-kms"
-
   def install(self, env):
     self.install_packages(env)
     import params

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/upgrade.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/upgrade.py b/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/upgrade.py
index 8478bb8..73c32ff 100644
--- a/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/upgrade.py
+++ b/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/upgrade.py
@@ -27,4 +27,4 @@ def prestart(env, stack_component):
 
   if params.version and params.stack_supports_config_versioning:
     conf_select.select(params.stack_name, stack_component, params.version)
-    stack_select.select(stack_component, params.version)
+    stack_select.select_packages(params.version)

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/slider_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/slider_client.py b/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/slider_client.py
index 08c8569..adc7026 100644
--- a/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/slider_client.py
+++ b/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/slider_client.py
@@ -35,22 +35,18 @@ class SliderClient(Script):
 
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class SliderClientLinux(SliderClient):
-  def get_component_name(self):
-    return "slider-client"
-
   def pre_upgrade_restart(self, env,  upgrade_type=None):
     import params
     env.set_params(params)
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
       conf_select.select(params.stack_name, "slider", params.version)
-      stack_select.select("slider-client", params.version)
+      stack_select.select_packages(params.version)
 
       # also set all of the hadoop clients since slider client is upgraded as
       # part of the final "CLIENTS" group and we need to ensure that
       # hadoop-client is also set
       conf_select.select(params.stack_name, "hadoop", params.version)
-      stack_select.select("hadoop-client", params.version)
 
   def install(self, env):
     self.install_packages(env)

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/job_history_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/job_history_server.py b/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/job_history_server.py
index 4fe2f54..c36def4 100644
--- a/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/job_history_server.py
+++ b/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/job_history_server.py
@@ -68,9 +68,6 @@ class JobHistoryServer(Script):
     check_process_status(status_params.spark_history_server_pid_file)
     
 
-  def get_component_name(self):
-    return "spark-historyserver"
-
   def pre_upgrade_restart(self, env, upgrade_type=None):
     import params
 
@@ -78,7 +75,7 @@ class JobHistoryServer(Script):
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
       Logger.info("Executing Spark Job History Server Stack Upgrade pre-restart")
       conf_select.select(params.stack_name, "spark", params.version)
-      stack_select.select("spark-historyserver", params.version)
+      stack_select.select_packages(params.version)
 
       # Spark 1.3.1.2.3, and higher, which was included in HDP 2.3, does not have a dependency on Tez, so it does not
       # need to copy the tarball, otherwise, copy it.


[2/5] ambari git commit: AMBARI-21580 - Replace Hard Coded stack-select Structures (jonathanhurley)

Posted by jo...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/YARN/package/scripts/mapreduce2_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/YARN/package/scripts/mapreduce2_client.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/YARN/package/scripts/mapreduce2_client.py
index 3b937b0..57d0d72 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/YARN/package/scripts/mapreduce2_client.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/YARN/package/scripts/mapreduce2_client.py
@@ -28,16 +28,13 @@ from yarn import yarn
 
 class MapReduce2Client(Script):
 
-  def get_component_name(self):
-    return "hadoop-client"
-
   def pre_upgrade_restart(self, env, upgrade_type=None):
     import params
     env.set_params(params)
 
     if params.version and compare_versions(format_stack_version(params.version), '4.0.0.0') >= 0:
       conf_select.select(params.stack_name, "hadoop", params.version)
-      stack_select.select("hadoop-client", params.version)
+      stack_select.select_packages(params.version)
       #Execute(format("iop-select set hadoop-client {version}"))
 
   def install(self, env):

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/YARN/package/scripts/nodemanager.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/YARN/package/scripts/nodemanager.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/YARN/package/scripts/nodemanager.py
index 1d959e1..6b09b13 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/YARN/package/scripts/nodemanager.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/YARN/package/scripts/nodemanager.py
@@ -35,9 +35,6 @@ from service import service
 
 class Nodemanager(Script):
 
-  def get_component_name(self):
-    return "hadoop-yarn-nodemanager"
-
   def install(self, env):
     self.install_packages(env)
 
@@ -53,7 +50,7 @@ class Nodemanager(Script):
 
     if params.version and compare_versions(format_stack_version(params.version), '4.0.0.0') >= 0:
       conf_select.select(params.stack_name, "hadoop", params.version)
-      stack_select.select("hadoop-yarn-nodemanager", params.version)
+      stack_select.select_packages(params.version)
       #Execute(format("iop-select set hadoop-yarn-nodemanager {version}"))
 
   def start(self, env, upgrade_type=None):

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/YARN/package/scripts/resourcemanager.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/YARN/package/scripts/resourcemanager.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/YARN/package/scripts/resourcemanager.py
index 46d825a..274100d 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/YARN/package/scripts/resourcemanager.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/YARN/package/scripts/resourcemanager.py
@@ -33,9 +33,6 @@ from setup_ranger_yarn import setup_ranger_yarn
 
 class Resourcemanager(Script):
 
-  def get_component_name(self):
-    return "hadoop-yarn-resourcemanager"
-
   def install(self, env):
     self.install_packages(env)
 
@@ -52,7 +49,7 @@ class Resourcemanager(Script):
 
     if params.version and compare_versions(format_stack_version(params.version), '4.0.0.0') >= 0:
       conf_select.select(params.stack_name, "hadoop", params.version)
-      stack_select.select("hadoop-yarn-resourcemanager", params.version)
+      stack_select.select_packages(params.version)
       #Execute(format("iop-select set hadoop-yarn-resourcemanager {version}"))
 
   def start(self, env, upgrade_type=None):

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/YARN/package/scripts/yarn_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/YARN/package/scripts/yarn_client.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/YARN/package/scripts/yarn_client.py
index e20e483..e3b9125 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/YARN/package/scripts/yarn_client.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/YARN/package/scripts/yarn_client.py
@@ -40,16 +40,13 @@ class YarnClient(Script):
   def status(self, env):
     raise ClientComponentHasNoStatus()
 
-  def get_component_name(self):
-    return "hadoop-client"
-
   def pre_upgrade_restart(self, env, upgrade_type=None):
     import params
     env.set_params(params)
 
     if params.version and compare_versions(format_stack_version(params.version), '4.0.0.0') >= 0:
       conf_select.select(params.stack_name, "hadoop", params.version)
-      stack_select.select("hadoop-client", params.version)
+      stack_select.select_packages(params.version)
       #Execute(format("iop-select set hadoop-client {version}"))
 
 if __name__ == "__main__":

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/ZOOKEEPER/package/scripts/zookeeper.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/ZOOKEEPER/package/scripts/zookeeper.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/ZOOKEEPER/package/scripts/zookeeper.py
index cea5d64..9253f43 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/ZOOKEEPER/package/scripts/zookeeper.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/ZOOKEEPER/package/scripts/zookeeper.py
@@ -74,7 +74,7 @@ def zookeeper(type = None, upgrade_type=None):
     # This path may be missing after Ambari upgrade. We need to create it.
     if (upgrade_type == "rolling") and (not os.path.exists("/usr/iop/current/zookeeper-server")) and params.current_version:
       conf_select(params.stack_name, "zookeeper", params.current_version)
-      stack_select.select("zookeeper-server", params.version)
+      stack_select.select_packages(params.version)
       #Execute(format("iop-select set zookeeper-server {version}"))
 
   if (params.log4j_props != None):

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/ZOOKEEPER/package/scripts/zookeeper_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/ZOOKEEPER/package/scripts/zookeeper_client.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/ZOOKEEPER/package/scripts/zookeeper_client.py
index f579168..f5408aa 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/ZOOKEEPER/package/scripts/zookeeper_client.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/ZOOKEEPER/package/scripts/zookeeper_client.py
@@ -30,9 +30,6 @@ from zookeeper import zookeeper
 
 class ZookeeperClient(Script):
 
-  def get_component_name(self):
-    return "zookeeper-client"
-
   def install(self, env):
     self.install_packages(env)
     self.configure(env)
@@ -50,7 +47,7 @@ class ZookeeperClient(Script):
 
     if params.version and compare_versions(format_stack_version(params.version), '4.0.0.0') >= 0:
       conf_select.select(params.stack_name, "zookeeper", params.version)
-      stack_select.select("zookeeper-client", params.version)
+      stack_select.select_packages(params.version)
       #Execute(format("iop-select set zookeeper-client {version}"))
 
   def start(self, env, upgrade_type=None):

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/ZOOKEEPER/package/scripts/zookeeper_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/ZOOKEEPER/package/scripts/zookeeper_server.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/ZOOKEEPER/package/scripts/zookeeper_server.py
index e0ce881..fb92b2e 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/ZOOKEEPER/package/scripts/zookeeper_server.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/ZOOKEEPER/package/scripts/zookeeper_server.py
@@ -40,9 +40,6 @@ from zookeeper_service import zookeeper_service
 
 class ZookeeperServer(Script):
 
-  def get_component_name(self):
-    return "zookeeper-server"
-
   def install(self, env):
     self.install_packages(env)
     self.configure(env)
@@ -59,7 +56,7 @@ class ZookeeperServer(Script):
 
     if params.version and compare_versions(format_stack_version(params.version), '4.0.0.0') >= 0:
       conf_select.select(params.stack_name, "zookeeper", params.version)
-      stack_select.select("zookeeper-server", params.version)
+      stack_select.select_packages(params.version)
       #Execute(format("iop-select set zookeeper-server {version}"))
 
   def start(self, env, upgrade_type=None):

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/stacks/HDP/2.0.6/configuration/cluster-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/configuration/cluster-env.xml b/ambari-server/src/main/resources/stacks/HDP/2.0.6/configuration/cluster-env.xml
index c6b091d..2d797bb 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/configuration/cluster-env.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/configuration/cluster-env.xml
@@ -262,6 +262,21 @@ gpgcheck=0</value>
     </value-attributes>
     <on-ambari-upgrade add="true"/>
   </property>
+  <!-- Define stack_select_packages property in the base stack. DO NOT override this property for each stack version -->
+  <property>
+    <name>stack_select_packages</name>
+    <value/>
+    <description>Associations between component and stack-select tools.</description>
+    <property-type>VALUE_FROM_PROPERTY_FILE</property-type>
+    <value-attributes>
+      <property-file-name>stack_select_packages.json</property-file-name>
+      <property-file-type>json</property-file-type>
+      <read-only>true</read-only>
+      <overridable>false</overridable>
+      <visible>false</visible>
+    </value-attributes>
+    <on-ambari-upgrade add="true"/>
+  </property>
   <property>
     <name>stack_root</name>
     <value>{"HDP":"/usr/hdp"}</value>

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/hook.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/hook.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/hook.py
index 8a583b3..8bae9e6 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/hook.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/hook.py
@@ -28,7 +28,7 @@ class AfterInstallHook(Hook):
     import params
 
     env.set_params(params)
-    setup_stack_symlinks()
+    setup_stack_symlinks(self.stroutfile)
     setup_config()
 
     link_configs(self.stroutfile)

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/params.py
index 1782298..24f9076 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/params.py
@@ -41,9 +41,6 @@ sudo = AMBARI_SUDO_BINARY
 stack_version_unformatted = config['hostLevelParams']['stack_version']
 stack_version_formatted = format_stack_version(stack_version_unformatted)
 
-# current host stack version
-current_version = default("/hostLevelParams/current_version", None)
-
 # default hadoop params
 mapreduce_libs_path = "/usr/lib/hadoop-mapreduce/*"
 hadoop_libexec_dir = stack_select.get_hadoop_dir("libexec")

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/shared_initialization.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/shared_initialization.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/shared_initialization.py
index e9f2283..ce106d2 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/shared_initialization.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/shared_initialization.py
@@ -23,13 +23,12 @@ from resource_management.core.logger import Logger
 from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.format import format
-from resource_management.libraries.functions.version import compare_versions
 from resource_management.libraries.functions.fcntl_based_process_lock import FcntlBasedProcessLock
 from resource_management.libraries.resources.xml_config import XmlConfig
 from resource_management.libraries.script import Script
 
 
-def setup_stack_symlinks():
+def setup_stack_symlinks(struct_out_file):
   """
   Invokes <stack-selector-tool> set all against a calculated fully-qualified, "normalized" version based on a
   stack version, such as "2.3". This should always be called after a component has been
@@ -38,18 +37,30 @@ def setup_stack_symlinks():
   :return:
   """
   import params
-  if params.stack_version_formatted != "" and compare_versions(params.stack_version_formatted, '2.2') >= 0:
-    # try using the exact version first, falling back in just the stack if it's not defined
-    # which would only be during an intial cluster installation
-    version = params.current_version if params.current_version is not None else params.stack_version_unformatted
-
-    if not params.upgrade_suspended:
-      if params.host_sys_prepped:
-        Logger.warning("Skipping running stack-selector-tool for stack {0} as its a sys_prepped host. This may cause symlink pointers not to be created for HDP componets installed later on top of an already sys_prepped host.".format(version))
-        return
-      # On parallel command execution this should be executed by a single process at a time.
-      with FcntlBasedProcessLock(params.stack_select_lock_file, enabled = params.is_parallel_execution_enabled, skip_fcntl_failures = True):
-        stack_select.select_all(version)
+  if params.upgrade_suspended:
+    Logger.warning("Skipping running stack-selector-tool because there is a suspended upgrade")
+    return
+
+  if params.host_sys_prepped:
+    Logger.warning("Skipping running stack-selector-tool becase this is a sys_prepped host. This may cause symlink pointers not to be created for HDP componets installed later on top of an already sys_prepped host.")
+    return
+
+  # get the packages which the stack-select tool should be used on
+  stack_select_packages = stack_select.get_packages(stack_select.PACKAGE_SCOPE_INSTALL)
+  if stack_select_packages is None:
+    return
+
+  json_version = load_version(struct_out_file)
+
+  if not json_version:
+    Logger.info("There is no advertised version for this component stored in {0}".format(struct_out_file))
+    return
+
+  # On parallel command execution this should be executed by a single process at a time.
+  with FcntlBasedProcessLock(params.stack_select_lock_file, enabled = params.is_parallel_execution_enabled, skip_fcntl_failures = True):
+    for package in stack_select_packages:
+      stack_select.select(package, json_version)
+
 
 def setup_config():
   import params
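
The reworked after-INSTALL hook above now derives the version to select from the component's structured output file (passed in as struct_out_file), asks stack_select.get_packages(stack_select.PACKAGE_SCOPE_INSTALL) for the INSTALL-scoped packages, and selects each one under the process lock. A hedged sketch of the load_version() helper it leans on; reading a top-level "version" field is an assumption about the structured-output layout, not a quote of the shipped implementation:

import json

def load_version(struct_out_file):
  # Return the repository version advertised during package installation,
  # or None if the structured output file is missing or unparsable.
  try:
    with open(struct_out_file, "r") as fp:
      structured_output = json.load(fp)
    return structured_output.get("version")
  except (IOError, ValueError):
    return None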

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/stacks/HDP/2.0.6/properties/stack_select_packages.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/properties/stack_select_packages.json b/ambari-server/src/main/resources/stacks/HDP/2.0.6/properties/stack_select_packages.json
new file mode 100644
index 0000000..2747188
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/properties/stack_select_packages.json
@@ -0,0 +1,952 @@
+{
+  "HDP": {
+    "stack-select": {
+      "ACCUMULO": {
+        "ACCUMULO_CLIENT": {
+          "STACK-SELECT-PACKAGE": "accumulo-client",
+          "INSTALL": [
+            "accumulo-client"
+          ],
+          "PATCH": [
+            "accumulo-client"
+          ],
+          "STANDARD": [
+            "accumulo-client"
+          ]
+        },
+        "ACCUMULO_GC": {
+          "STACK-SELECT-PACKAGE": "accumulo-gc",
+          "INSTALL": [
+            "accumulo-gc"
+          ],
+          "PATCH": [
+            "accumulo-gc"
+          ],
+          "STANDARD": [
+            "accumulo-gc",
+            "accumulo-client"
+          ]
+        },
+        "ACCUMULO_MASTER": {
+          "STACK-SELECT-PACKAGE": "accumulo-master",
+          "INSTALL": [
+            "accumulo-master"
+          ],
+          "PATCH": [
+            "accumulo-master"
+          ],
+          "STANDARD": [
+            "accumulo-master",
+            "accumulo-client"
+          ]
+        },
+        "ACCUMULO_MONITOR": {
+          "STACK-SELECT-PACKAGE": "accumulo-monitor",
+          "INSTALL": [
+            "accumulo-monitor"
+          ],
+          "PATCH": [
+            "accumulo-monitor"
+          ],
+          "STANDARD": [
+            "accumulo-monitor",
+            "accumulo-client"
+          ]
+        },
+        "ACCUMULO_TRACER": {
+          "STACK-SELECT-PACKAGE": "accumulo-tracer",
+          "INSTALL": [
+            "accumulo-tracer"
+          ],
+          "PATCH": [
+            "accumulo-tracer"
+          ],
+          "STANDARD": [
+            "accumulo-tracer",
+            "accumulo-client"
+          ]
+        },
+        "ACCUMULO_TSERVER": {
+          "STACK-SELECT-PACKAGE": "accumulo-tablet",
+          "INSTALL": [
+            "accumulo-tablet"
+          ],
+          "PATCH": [
+            "accumulo-tablet"
+          ],
+          "STANDARD": [
+            "accumulo-tablet",
+            "accumulo-client"
+          ]
+        }
+      },
+      "ATLAS": {
+        "ATLAS_CLIENT": {
+          "STACK-SELECT-PACKAGE": "atlas-client",
+          "INSTALL": [
+            "atlas-client"
+          ],
+          "PATCH": [
+            "atlas-client"
+          ],
+          "STANDARD": [
+            "atlas-client"
+          ]
+        },
+        "ATLAS_SERVER": {
+          "STACK-SELECT-PACKAGE": "atlas-server",
+          "INSTALL": [
+            "atlas-server"
+          ],
+          "PATCH": [
+            "atlas-server"
+          ],
+          "STANDARD": [
+            "atlas-server"
+          ]
+        }
+      },
+      "DRUID": {
+        "DRUID_COORDINATOR": {
+          "STACK-SELECT-PACKAGE": "druid-coordinator",
+          "INSTALL": [
+            "druid-coordinator"
+          ],
+          "PATCH": [
+            "druid-coordinator"
+          ],
+          "STANDARD": [
+            "druid-coordinator"
+          ]
+        },
+        "DRUID_OVERLORD": {
+          "STACK-SELECT-PACKAGE": "druid-overlord",
+          "INSTALL": [
+            "druid-overlord"
+          ],
+          "PATCH": [
+            "druid-overlord"
+          ],
+          "STANDARD": [
+            "druid-overlord"
+          ]
+        },
+        "DRUID_HISTORICAL": {
+          "STACK-SELECT-PACKAGE": "druid-historical",
+          "INSTALL": [
+            "druid-historical"
+          ],
+          "PATCH": [
+            "druid-historical"
+          ],
+          "STANDARD": [
+            "druid-historical"
+          ]
+        },
+        "DRUID_BROKER": {
+          "STACK-SELECT-PACKAGE": "druid-broker",
+          "INSTALL": [
+            "druid-broker"
+          ],
+          "PATCH": [
+            "druid-broker"
+          ],
+          "STANDARD": [
+            "druid-broker"
+          ]
+        },
+        "DRUID_MIDDLEMANAGER": {
+          "STACK-SELECT-PACKAGE": "druid-middlemanager",
+          "INSTALL": [
+            "druid-middlemanager"
+          ],
+          "PATCH": [
+            "druid-middlemanager"
+          ],
+          "STANDARD": [
+            "druid-middlemanager"
+          ]
+        },
+        "DRUID_ROUTER": {
+          "STACK-SELECT-PACKAGE": "druid-router",
+          "INSTALL": [
+            "druid-router"
+          ],
+          "PATCH": [
+            "druid-router"
+          ],
+          "STANDARD": [
+            "druid-router"
+          ]
+        },
+        "DRUID_SUPERSET": {
+          "STACK-SELECT-PACKAGE": "druid-superset",
+          "INSTALL": [
+            "druid-superset"
+          ],
+          "PATCH": [
+            "druid-superset"
+          ],
+          "STANDARD": [
+            "druid-superset"
+          ]
+        }
+      },
+      "FALCON": {
+        "FALCON_CLIENT": {
+          "STACK-SELECT-PACKAGE": "falcon-client",
+          "INSTALL": [
+            "falcon-client"
+          ],
+          "PATCH": [
+            "falcon-client"
+          ],
+          "STANDARD": [
+            "falcon-client"
+          ]
+        },
+        "FALCON_SERVER": {
+          "STACK-SELECT-PACKAGE": "falcon-server",
+          "INSTALL": [
+            "falcon-server"
+          ],
+          "PATCH": [
+            "falcon-server"
+          ],
+          "STANDARD": [
+            "falcon-server"
+          ]
+        }
+      },
+      "FLUME": {
+        "FLUME_HANDLER": {
+          "STACK-SELECT-PACKAGE": "flume-server",
+          "INSTALL": [
+            "flume-server"
+          ],
+          "PATCH": [
+            "flume-server"
+          ],
+          "STANDARD": [
+            "flume-server"
+          ]
+        }
+      },
+      "HBASE": {
+        "HBASE_CLIENT": {
+          "STACK-SELECT-PACKAGE": "hbase-client",
+          "INSTALL": [
+            "hbase-client"
+          ],
+          "PATCH": [
+            "hbase-client"
+          ],
+          "STANDARD": [
+            "hbase-client",
+            "phoenix-client",
+            "hadoop-client"
+          ]
+        },
+        "HBASE_MASTER": {
+          "STACK-SELECT-PACKAGE": "hbase-master",
+          "INSTALL": [
+            "hbase-master"
+          ],
+          "PATCH": [
+            "hbase-master"
+          ],
+          "STANDARD": [
+            "hbase-master"
+          ]
+        },
+        "HBASE_REGIONSERVER": {
+          "STACK-SELECT-PACKAGE": "hbase-regionserver",
+          "INSTALL": [
+            "hbase-regionserver"
+          ],
+          "PATCH": [
+            "hbase-regionserver"
+          ],
+          "STANDARD": [
+            "hbase-regionserver"
+          ]
+        },
+        "PHOENIX_QUERY_SERVER": {
+          "STACK-SELECT-PACKAGE": "phoenix-server",
+          "INSTALL": [
+            "phoenix-server"
+          ],
+          "PATCH": [
+            "phoenix-server"
+          ],
+          "STANDARD": [
+            "phoenix-server"
+          ]
+        }
+      },
+      "HDFS": {
+        "DATANODE": {
+          "STACK-SELECT-PACKAGE": "hadoop-hdfs-datanode",
+          "INSTALL": [
+            "hadoop-hdfs-datanode"
+          ],
+          "PATCH": [
+            "hadoop-hdfs-datanode"
+          ],
+          "STANDARD": [
+            "hadoop-hdfs-datanode"
+          ]
+        },
+        "HDFS_CLIENT": {
+          "STACK-SELECT-PACKAGE": "hadoop-client",
+          "INSTALL": [
+            "hadoop-client"
+          ],
+          "PATCH": [
+            "INVALID"
+          ],
+          "STANDARD": [
+            "hadoop-client"
+          ]
+        },
+        "NAMENODE": {
+          "STACK-SELECT-PACKAGE": "hadoop-hdfs-namenode",
+          "INSTALL": [
+            "hadoop-hdfs-namenode"
+          ],
+          "PATCH": [
+            "hadoop-hdfs-namenode"
+          ],
+          "STANDARD": [
+            "hadoop-hdfs-namenode"
+          ]
+        },
+        "NFS_GATEWAY": {
+          "STACK-SELECT-PACKAGE": "hadoop-hdfs-nfs3",
+          "INSTALL": [
+            "hadoop-hdfs-nfs3"
+          ],
+          "PATCH": [
+            "hadoop-hdfs-nfs3"
+          ],
+          "STANDARD": [
+            "hadoop-hdfs-nfs3"
+          ]
+        },
+        "JOURNALNODE": {
+          "STACK-SELECT-PACKAGE": "hadoop-hdfs-journalnode",
+          "INSTALL": [
+            "hadoop-hdfs-journalnode"
+          ],
+          "PATCH": [
+            "hadoop-hdfs-journalnode"
+          ],
+          "STANDARD": [
+            "hadoop-hdfs-journalnode"
+          ]
+        },
+        "SECONDARY_NAMENODE": {
+          "STACK-SELECT-PACKAGE": "hadoop-hdfs-secondarynamenode",
+          "INSTALL": [
+            "hadoop-hdfs-secondarynamenode"
+          ],
+          "PATCH": [
+            "hadoop-hdfs-secondarynamenode"
+          ],
+          "STANDARD": [
+            "hadoop-hdfs-secondarynamenode"
+          ]
+        },
+        "ZKFC": {
+          "STACK-SELECT-PACKAGE": "hadoop-hdfs-zkfc",
+          "INSTALL": [
+            "hadoop-hdfs-zkfc"
+          ],
+          "PATCH": [
+            "hadoop-hdfs-zkfc"
+          ],
+          "STANDARD": [
+            "hadoop-hdfs-zkfc"
+          ]
+        }
+      },
+      "HIVE": {
+        "HCAT": {
+          "STACK-SELECT-PACKAGE": "hive-webhcat",
+          "INSTALL": [
+            "hive-webhcat"
+          ],
+          "PATCH": [
+            "hive-webhcat"
+          ],
+          "STANDARD": [
+            "hive-webhcat"
+          ]
+        },
+        "HIVE_METASTORE": {
+          "STACK-SELECT-PACKAGE": "hive-metastore",
+          "INSTALL": [
+            "hive-metastore"
+          ],
+          "PATCH": [
+            "hive-metastore"
+          ],
+          "STANDARD": [
+            "hive-metastore"
+          ]
+        },
+        "HIVE_SERVER": {
+          "STACK-SELECT-PACKAGE": "hive-server2",
+          "INSTALL": [
+            "hive-server2"
+          ],
+          "PATCH": [
+            "hive-server2"
+          ],
+          "STANDARD": [
+            "hive-server2"
+          ]
+        },
+        "HIVE_SERVER_INTERACTIVE": {
+          "STACK-SELECT-PACKAGE": "hive-server2-hive2",
+          "INSTALL": [
+            "hive-server2-hive2"
+          ],
+          "PATCH": [
+            "hive-server2-hive2"
+          ],
+          "STANDARD": [
+            "hive-server2-hive2"
+          ]
+        },
+        "HIVE_CLIENT": {
+          "STACK-SELECT-PACKAGE": "hadoop-client",
+          "INSTALL": [
+            "hadoop-client"
+          ],
+          "PATCH": [
+            "INVALID"
+          ],
+          "STANDARD": [
+            "hadoop-client"
+          ]
+        },
+        "WEBHCAT_SERVER": {
+          "STACK-SELECT-PACKAGE": "hive-webhcat",
+          "INSTALL": [
+            "hive-webhcat"
+          ],
+          "PATCH": [
+            "hive-webhcat"
+          ],
+          "STANDARD": [
+            "hive-webhcat"
+          ]
+        }
+      },
+      "KAFKA": {
+        "KAFKA_BROKER": {
+          "STACK-SELECT-PACKAGE": "kafka-broker",
+          "INSTALL": [
+            "kafka-broker"
+          ],
+          "PATCH": [
+            "kafka-broker"
+          ],
+          "STANDARD": [
+            "kafka-broker"
+          ]
+        }
+      },
+      "KNOX": {
+        "KNOX_GATEWAY": {
+          "STACK-SELECT-PACKAGE": "knox-server",
+          "INSTALL": [
+            "knox-server"
+          ],
+          "PATCH": [
+            "knox-server"
+          ],
+          "STANDARD": [
+            "knox-server"
+          ]
+        }
+      },
+      "MAHOUT": {
+        "MAHOUT": {
+          "STACK-SELECT-PACKAGE": "mahout-client",
+          "INSTALL": [
+            "mahout-client"
+          ],
+          "PATCH": [
+            "mahout-client"
+          ],
+          "STANDARD": [
+            "mahout-client"
+          ]
+        }
+      },
+      "MAPREDUCE2": {
+        "HISTORYSERVER": {
+          "STACK-SELECT-PACKAGE": "hadoop-mapreduce-historyserver",
+          "INSTALL": [
+            "hadoop-mapreduce-historyserver"
+          ],
+          "PATCH": [
+            "hadoop-mapreduce-historyserver"
+          ],
+          "STANDARD": [
+            "hadoop-mapreduce-historyserver"
+          ]
+        },
+        "MAPREDUCE2_CLIENT": {
+          "STACK-SELECT-PACKAGE": "hadoop-client",
+          "INSTALL": [
+            "hadoop-client"
+          ],
+          "PATCH": [
+            "hadoop-mapreduce-INVALID"
+          ],
+          "STANDARD": [
+            "hadoop-client"
+          ]
+        }
+      },
+      "OOZIE": {
+        "OOZIE_CLIENT": {
+          "STACK-SELECT-PACKAGE": "oozie-client",
+          "INSTALL": [
+            "oozie-client"
+          ],
+          "PATCH": [
+            "oozie-client"
+          ],
+          "STANDARD": [
+            "oozie-client"
+          ]
+        },
+        "OOZIE_SERVER": {
+          "STACK-SELECT-PACKAGE": "oozie-server",
+          "INSTALL": [
+            "oozie-client",
+            "oozie-server"
+          ],
+          "PATCH": [
+            "oozie-server"
+          ],
+          "STANDARD": [
+            "oozie-client",
+            "oozie-server"
+          ]
+        }
+      },
+      "PIG": {
+        "PIG": {
+          "STACK-SELECT-PACKAGE": "hadoop-client",
+          "INSTALL": [
+            "hadoop-client"
+          ],
+          "PATCH": [
+            "INVALID"
+          ],
+          "STANDARD": [
+            "hadoop-client"
+          ]
+        }
+      },
+      "R4ML": {
+        "R4ML": {
+          "STACK-SELECT-PACKAGE": "r4ml-client",
+          "INSTALL": [
+            "r4ml-client"
+          ],
+          "PATCH": [
+            "r4ml-client"
+          ],
+          "STANDARD": [
+            "r4ml-client"
+          ]
+        }
+      },
+      "RANGER": {
+        "RANGER_ADMIN": {
+          "STACK-SELECT-PACKAGE": "ranger-admin",
+          "INSTALL": [
+            "ranger-admin"
+          ],
+          "PATCH": [
+            "ranger-admin"
+          ],
+          "STANDARD": [
+            "ranger-admin"
+          ]
+        },
+        "RANGER_TAGSYNC": {
+          "STACK-SELECT-PACKAGE": "ranger-tagsync",
+          "INSTALL": [
+            "ranger-tagsync"
+          ],
+          "PATCH": [
+            "ranger-tagsync"
+          ],
+          "STANDARD": [
+            "ranger-tagsync"
+          ]
+        },
+        "RANGER_USERSYNC": {
+          "STACK-SELECT-PACKAGE": "ranger-usersync",
+          "INSTALL": [
+            "ranger-usersync"
+          ],
+          "PATCH": [
+            "ranger-usersync"
+          ],
+          "STANDARD": [
+            "ranger-usersync"
+          ]
+        }
+      },
+      "RANGER_KMS": {
+        "RANGER_KMS_SERVER": {
+          "STACK-SELECT-PACKAGE": "ranger-kms",
+          "INSTALL": [
+            "ranger-kms"
+          ],
+          "PATCH": [
+            "ranger-kms"
+          ],
+          "STANDARD": [
+            "ranger-kms"
+          ]
+        }
+      },
+      "SLIDER": {
+        "SLIDER": {
+          "STACK-SELECT-PACKAGE": "slider-client",
+          "INSTALL": [
+            "slider-client"
+          ],
+          "PATCH": [
+            "slider-client"
+          ],
+          "STANDARD": [
+            "slider-client",
+            "hadoop-client"
+          ]
+        }
+      },
+      "SPARK": {
+        "LIVY_SERVER": {
+          "STACK-SELECT-PACKAGE": "livy-server",
+          "INSTALL": [
+            "livy-server"
+          ],
+          "PATCH": [
+            "livy-server"
+          ],
+          "STANDARD": [
+            "livy-server"
+          ]
+        },
+        "SPARK_CLIENT": {
+          "STACK-SELECT-PACKAGE": "spark-client",
+          "INSTALL": [
+            "spark-client"
+          ],
+          "PATCH": [
+            "spark-client"
+          ],
+          "STANDARD": [
+            "spark-client"
+          ]
+        },
+        "SPARK_JOBHISTORYSERVER": {
+          "STACK-SELECT-PACKAGE": "spark-historyserver",
+          "INSTALL": [
+            "spark-historyserver"
+          ],
+          "PATCH": [
+            "spark-historyserver"
+          ],
+          "STANDARD": [
+            "spark-historyserver"
+          ]
+        },
+        "SPARK_THRIFTSERVER": {
+          "STACK-SELECT-PACKAGE": "spark-thriftserver",
+          "INSTALL": [
+            "spark-thriftserver"
+          ],
+          "PATCH": [
+            "spark-thriftserver"
+          ],
+          "STANDARD": [
+            "spark-thriftserver"
+          ]
+        }
+      },
+      "SPARK2": {
+        "LIVY2_SERVER": {
+          "STACK-SELECT-PACKAGE": "livy2-server",
+          "INSTALL": [
+            "livy2-server"
+          ],
+          "PATCH": [
+            "livy2-server"
+          ],
+          "STANDARD": [
+            "livy2-server"
+          ]
+        },
+        "SPARK2_CLIENT": {
+          "STACK-SELECT-PACKAGE": "spark2-client",
+          "INSTALL": [
+            "spark2-client"
+          ],
+          "PATCH": [
+            "spark2-client"
+          ],
+          "STANDARD": [
+            "spark2-client"
+          ]
+        },
+        "SPARK2_JOBHISTORYSERVER": {
+          "STACK-SELECT-PACKAGE": "spark2-historyserver",
+          "INSTALL": [
+            "spark2-historyserver"
+          ],
+          "PATCH": [
+            "spark2-historyserver"
+          ],
+          "STANDARD": [
+            "spark2-historyserver"
+          ]
+        },
+        "SPARK2_THRIFTSERVER": {
+          "STACK-SELECT-PACKAGE": "spark2-thriftserver",
+          "INSTALL": [
+            "spark2-thriftserver"
+          ],
+          "PATCH": [
+            "spark2-thriftserver"
+          ],
+          "STANDARD": [
+            "spark2-thriftserver"
+          ]
+        }
+      },
+      "SQOOP": {
+        "SQOOP": {
+          "STACK-SELECT-PACKAGE": "sqoop-client",
+          "INSTALL": [
+            "sqoop-client"
+          ],
+          "PATCH": [
+            "sqoop-client"
+          ],
+          "STANDARD": [
+            "sqoop-client"
+          ]
+        }
+      },
+      "STORM": {
+        "NIMBUS": {
+          "STACK-SELECT-PACKAGE": "storm-nimbus",
+          "INSTALL": [
+            "storm-client",
+            "storm-nimbus"
+          ],
+          "PATCH": [
+            "storm-client",
+            "storm-nimbus"
+          ],
+          "STANDARD": [
+            "storm-client",
+            "storm-nimbus"
+          ]
+        },
+        "SUPERVISOR": {
+          "STACK-SELECT-PACKAGE": "storm-supervisor",
+          "INSTALL": [
+            "storm-supervisor"
+          ],
+          "PATCH": [
+            "storm-supervisor"
+          ],
+          "STANDARD": [
+            "storm-client",
+            "storm-supervisor"
+          ]
+        },
+        "DRPC_SERVER": {
+          "STACK-SELECT-PACKAGE": "storm-client",
+          "INSTALL": [
+            "storm-client"
+          ],
+          "PATCH": [
+            "storm-client"
+          ],
+          "STANDARD": [
+            "storm-client"
+          ]
+        },
+        "STORM_UI_SERVER": {
+          "STACK-SELECT-PACKAGE": "storm-client",
+          "INSTALL": [
+            "storm-client"
+          ],
+          "PATCH": [
+            "storm-client"
+          ],
+          "STANDARD": [
+            "storm-client"
+          ]
+        }
+      },
+      "SYSTEMML": {
+        "SYSTEMML": {
+          "STACK-SELECT-PACKAGE": "systemml-client",
+          "INSTALL": [
+            "systemml-client"
+          ],
+          "PATCH": [
+            "systemml-client"
+          ],
+          "STANDARD": [
+            "systemml-client"
+          ]
+        }
+      },
+      "TEZ": {
+        "TEZ_CLIENT": {
+          "STACK-SELECT-PACKAGE": "hadoop-client",
+          "INSTALL": [
+            "hadoop-client"
+          ],
+          "PATCH": [
+            "INVALID"
+          ],
+          "STANDARD": [
+            "hadoop-client"
+          ]
+        }
+      },
+      "TITAN": {
+        "TITAN_CLIENT": {
+          "STACK-SELECT-PACKAGE": "titan-client",
+          "INSTALL": [
+            "titan-client"
+          ],
+          "PATCH": [
+            "titan-client"
+          ],
+          "STANDARD": [
+            "titan-client"
+          ]
+        },
+        "TITAN_SERVER": {
+          "STACK-SELECT-PACKAGE": "titan-server",
+          "INSTALL": [
+            "titan-server"
+          ],
+          "PATCH": [
+            "titan-server"
+          ],
+          "STANDARD": [
+            "titan-server"
+          ]
+        }
+      },
+      "YARN": {
+        "APP_TIMELINE_SERVER": {
+          "STACK-SELECT-PACKAGE": "hadoop-yarn-timelineserver",
+          "INSTALL": [
+            "hadoop-yarn-timelineserver"
+          ],
+          "PATCH": [
+            "hadoop-yarn-timelineserver"
+          ],
+          "STANDARD": [
+            "hadoop-yarn-timelineserver"
+          ]
+        },
+        "NODEMANAGER": {
+          "STACK-SELECT-PACKAGE": "hadoop-yarn-nodemanager",
+          "INSTALL": [
+            "hadoop-yarn-nodemanager"
+          ],
+          "PATCH": [
+            "hadoop-yarn-nodemanager"
+          ],
+          "STANDARD": [
+            "hadoop-yarn-nodemanager"
+          ]
+        },
+        "RESOURCEMANAGER": {
+          "STACK-SELECT-PACKAGE": "hadoop-yarn-resourcemanager",
+          "INSTALL": [
+            "hadoop-yarn-resourcemanager"
+          ],
+          "PATCH": [
+            "hadoop-yarn-resourcemanager"
+          ],
+          "STANDARD": [
+            "hadoop-yarn-resourcemanager"
+          ]
+        },
+        "YARN_CLIENT": {
+          "STACK-SELECT-PACKAGE": "hadoop-client",
+          "INSTALL": [
+            "hadoop-client"
+          ],
+          "PATCH": [
+            "INVALID"
+          ],
+          "STANDARD": [
+            "hadoop-client"
+          ]
+        }
+      },
+      "ZEPPELIN": {
+        "ZEPPELIN_MASTER": {
+          "STACK-SELECT-PACKAGE": "zeppelin-server",
+          "INSTALL": [
+            "zeppelin-server"
+          ],
+          "PATCH": [
+            "zeppelin-server"
+          ],
+          "STANDARD": [
+            "zeppelin-server"
+          ]
+        }
+      },
+      "ZOOKEEPER": {
+        "ZOOKEEPER_CLIENT": {
+          "STACK-SELECT-PACKAGE": "zookeeper-client",
+          "INSTALL": [
+            "zookeeper-client"
+          ],
+          "PATCH": [
+            "zookeeper-client"
+          ],
+          "STANDARD": [
+            "zookeeper-client"
+          ]
+        },
+        "ZOOKEEPER_SERVER": {
+          "STACK-SELECT-PACKAGE": "zookeeper-server",
+          "INSTALL": [
+            "zookeeper-server"
+          ],
+          "PATCH": [
+            "zookeeper-server"
+          ],
+          "STANDARD": [
+            "zookeeper-server"
+          ]
+        }
+      }
+    }
+  }
+}
\ No newline at end of file
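
The structure that ends above maps each service and component role to a "STACK-SELECT-PACKAGE" plus the package lists used for the INSTALL, PATCH, and STANDARD orientations. As a minimal, self-contained sketch of how such a dictionary could be queried by service name and role (illustrative only, not Ambari's actual stack_select API; the function name, the inline sample data, and the handling of the "INVALID" placeholder are assumptions):

    # Illustrative lookup over a dict shaped like the JSON above (subset shown).
    STACK_PACKAGES = {
        "OOZIE": {
            "OOZIE_SERVER": {
                "STACK-SELECT-PACKAGE": "oozie-server",
                "INSTALL": ["oozie-client", "oozie-server"],
                "PATCH": ["oozie-server"],
                "STANDARD": ["oozie-client", "oozie-server"],
            }
        }
    }

    def lookup_stack_select(packages, service_name, component_role, orientation="STANDARD"):
        """Return (stack-select package, packages to pass to 'hdp-select set')."""
        component = packages.get(service_name, {}).get(component_role, {})
        # Entries marked "INVALID" are treated here as "nothing to select" (assumption).
        selected = [p for p in component.get(orientation, []) if "INVALID" not in p]
        return component.get("STACK-SELECT-PACKAGE"), selected

    # -> ('oozie-server', ['oozie-client', 'oozie-server'])
    print(lookup_stack_select(STACK_PACKAGES, "OOZIE", "OOZIE_SERVER"))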

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/test/python/stacks/2.0.6/FLUME/test_flume.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/FLUME/test_flume.py b/ambari-server/src/test/python/stacks/2.0.6/FLUME/test_flume.py
index 02810e2..dc21159 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/FLUME/test_flume.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/FLUME/test_flume.py
@@ -27,7 +27,9 @@ import os
 class TestFlumeHandler(RMFTestCase):
   COMMON_SERVICES_PACKAGE_DIR = "FLUME/1.4.0.2.0/package"
   STACK_VERSION = "2.0.6"
-  
+
+  CONFIG_OVERRIDES = {"serviceName":"FLUME", "role":"FLUME_HANDLER"}
+
   def test_configure_default(self):
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/flume_handler.py",
                        classname = "FlumeHandler",
@@ -560,6 +562,7 @@ class TestFlumeHandler(RMFTestCase):
                        classname = "FlumeHandler",
                        command = "pre_upgrade_restart",
                        config_file="flume_22.json",
+                       config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES)
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_client.py b/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_client.py
index cffec06..7c5c7f5 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_client.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_client.py
@@ -29,6 +29,8 @@ class TestHBaseClient(RMFTestCase):
   STACK_VERSION = "2.0.6"
   TMP_PATH = '/hadoop'
 
+  CONFIG_OVERRIDES = {"serviceName":"HBASE", "role":"HBASE_CLIENT"}
+
   def test_configure_secured(self):
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hbase_client.py",
                    classname = "HbaseClient",
@@ -239,6 +241,7 @@ class TestHBaseClient(RMFTestCase):
                        classname = "HbaseClient",
                        command = "restart",
                        config_dict = json_content,
+                       config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
                        call_mocks = [(0, None, ''), (0, None, ''), (0, None, ''), (0, None, '')],
@@ -258,7 +261,7 @@ class TestHBaseClient(RMFTestCase):
        mocks_dict['call'].call_args_list[0][0][0])
     self.assertEquals(
       ('ambari-python-wrap', '/usr/bin/conf-select', 'set-conf-dir', '--package', 'hadoop', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
-       mocks_dict['checked_call'].call_args_list[4][0][0])
+       mocks_dict['checked_call'].call_args_list[5][0][0])
     self.assertEquals(
       ('ambari-python-wrap', '/usr/bin/conf-select', 'create-conf-dir', '--package', 'hadoop', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
        mocks_dict['call'].call_args_list[1][0][0])

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_master.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_master.py b/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_master.py
index e85757b..a47bda3 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_master.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_master.py
@@ -29,6 +29,8 @@ class TestHBaseMaster(RMFTestCase):
   TMP_PATH = "/hadoop"
   DEFAULT_IMMUTABLE_PATHS = ['/apps/hive/warehouse', '/apps/falcon', '/mr-history/done', '/app-logs', '/tmp']
 
+  CONFIG_OVERRIDES = {"serviceName":"HBASE", "role":"HBASE_MASTER"}
+
   def test_install_hbase_master_default_no_phx(self):
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hbase_master.py",
                        classname = "HbaseMaster",
@@ -747,6 +749,7 @@ class TestHBaseMaster(RMFTestCase):
                        classname = "HbaseMaster",
                        command = "pre_upgrade_restart",
                        config_dict = json_content,
+                       config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
                        mocks_dict = mocks_dict)
@@ -770,6 +773,7 @@ class TestHBaseMaster(RMFTestCase):
                        classname = "HbaseMaster",
                        command = "pre_upgrade_restart",
                        config_dict = json_content,
+                       config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
                        call_mocks = [(0, None, ''), (0, None, ''), (0, None, ''), (0, None, '')],

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_regionserver.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_regionserver.py b/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_regionserver.py
index 93f5d19..6a2d8fb 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_regionserver.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_regionserver.py
@@ -30,6 +30,8 @@ class TestHbaseRegionServer(RMFTestCase):
   STACK_VERSION = "2.0.6"
   TMP_PATH = '/hadoop'
 
+  CONFIG_OVERRIDES = {"serviceName":"HBASE", "role":"HBASE_REGIONSERVER"}
+
   def test_configure_default(self):
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hbase_regionserver.py",
                    classname = "HbaseRegionServer",
@@ -540,6 +542,7 @@ class TestHbaseRegionServer(RMFTestCase):
                        classname = "HbaseRegionServer",
                        command = "pre_upgrade_restart",
                        config_dict = json_content,
+                       config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES)
     self.assertResourceCalled('Execute',
@@ -580,6 +583,7 @@ class TestHbaseRegionServer(RMFTestCase):
                        classname = "HbaseRegionServer",
                        command = "pre_upgrade_restart",
                        config_dict = json_content,
+                       config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
                        call_mocks = [(0, None, ''), (0, None), (0, None), (0, None)],

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_phoenix_queryserver.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_phoenix_queryserver.py b/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_phoenix_queryserver.py
index 1b324d4..973e274 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_phoenix_queryserver.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_phoenix_queryserver.py
@@ -32,6 +32,8 @@ class TestPhoenixQueryServer(RMFTestCase):
   STACK_VERSION = "2.3"
   TMP_PATH = "/hadoop"
 
+  CONFIG_OVERRIDES = {"serviceName":"HBASE", "role":"PHOENIX_QUERY_SERVER"}
+
   def test_configure_default(self):
     self.executeScript(
       self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/phoenix_queryserver.py",
@@ -432,6 +434,7 @@ class TestPhoenixQueryServer(RMFTestCase):
       classname = "PhoenixQueryServer",
       command = "pre_upgrade_restart",
       config_dict = json_content,
+      config_overrides = self.CONFIG_OVERRIDES,
       call_mocks = [(0, "/etc/hbase/2.3.0.0-1234/0", ''), (0, None, None), (0, None, None)],
       stack_version = self.STACK_VERSION,
       target = RMFTestCase.TARGET_COMMON_SERVICES)

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py
index d2968f8..0f31ad2 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py
@@ -32,6 +32,8 @@ class TestDatanode(RMFTestCase):
   COMMON_SERVICES_PACKAGE_DIR = "HDFS/2.1.0.2.0/package"
   STACK_VERSION = "2.0.6"
 
+  CONFIG_OVERRIDES = {"serviceName":"HDFS", "role":"DATANODE"}
+
   def test_configure_default(self):
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/datanode.py",
                        classname = "DataNode",
@@ -484,6 +486,7 @@ class TestDatanode(RMFTestCase):
                        classname = "DataNode",
                        command = "pre_upgrade_restart",
                        config_dict = json_content,
+                       config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES)
     self.assertResourceCalled('Execute',
@@ -504,6 +507,7 @@ class TestDatanode(RMFTestCase):
                        classname = "DataNode",
                        command = "pre_upgrade_restart",
                        config_dict = json_content,
+                       config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
                        call_mocks = [(0, None, ''), (0, None)],

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_hdfs_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_hdfs_client.py b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_hdfs_client.py
index bcd9c80..680c984 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_hdfs_client.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_hdfs_client.py
@@ -37,6 +37,8 @@ class Test(RMFTestCase):
   COMMON_SERVICES_PACKAGE_DIR = "HDFS/2.1.0.2.0/package"
   STACK_VERSION = "2.0.6"
 
+  CONFIG_OVERRIDES = {"serviceName":"HDFS", "role":"HDFS_CLIENT"}
+
   def test_generate_configs_default(self):
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hdfs_client.py",
                        classname = "HdfsClient",
@@ -81,6 +83,7 @@ class Test(RMFTestCase):
                    classname = "HdfsClient",
                    command = "restart",
                    config_file="client-upgrade.json",
+                   config_overrides = self.CONFIG_OVERRIDES,
                    stack_version = self.STACK_VERSION,
                    target = RMFTestCase.TARGET_COMMON_SERVICES)
 
@@ -101,6 +104,7 @@ class Test(RMFTestCase):
                        classname = "HdfsClient",
                        command = "pre_upgrade_restart",
                        config_dict = json_content,
+                       config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
                        call_mocks = [(0, None, ''), (0, None)],
@@ -128,6 +132,7 @@ class Test(RMFTestCase):
                        classname = "HdfsClient",
                        command = "pre_upgrade_restart",
                        config_dict = json_content,
+                       config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES)
     self.assertResourceCalledIgnoreEarlier('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'hadoop-client', version), sudo=True,)

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_journalnode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_journalnode.py b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_journalnode.py
index ff8f92e..06c5fdd 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_journalnode.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_journalnode.py
@@ -29,6 +29,8 @@ class TestJournalnode(RMFTestCase):
   STACK_VERSION = "2.0.6"
   UPGRADE_STACK_VERSION = "2.2"
 
+  CONFIG_OVERRIDES = {"serviceName":"HDFS", "role":"JOURNALNODE"}
+
   def test_configure_default(self):
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/journalnode.py",
                        classname = "JournalNode",
@@ -388,6 +390,7 @@ class TestJournalnode(RMFTestCase):
                        classname = "JournalNode",
                        command = "pre_upgrade_restart",
                        config_dict = json_content,
+                       config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES)
     self.assertResourceCalled('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'hadoop-hdfs-journalnode', version), sudo=True,)
@@ -406,6 +409,7 @@ class TestJournalnode(RMFTestCase):
                        classname = "JournalNode",
                        command = "pre_upgrade_restart",
                        config_dict = json_content,
+                       config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
                        call_mocks = [(0, None, ''), (0, None)],

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py
index 862a17e..0d27d15 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py
@@ -35,6 +35,8 @@ class TestNamenode(RMFTestCase):
   STACK_VERSION = "2.0.6"
   DEFAULT_IMMUTABLE_PATHS = ['/apps/hive/warehouse', '/apps/falcon', '/mr-history/done', '/app-logs', '/tmp']
 
+  CONFIG_OVERRIDES = {"serviceName":"HDFS", "role":"NAMENODE"}
+
   def test_configure_default(self):
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
                        classname = "NameNode",
@@ -1401,6 +1403,7 @@ class TestNamenode(RMFTestCase):
                        classname = "NameNode",
                        command = "pre_upgrade_restart",
                        config_dict = json_content,
+                       config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES)
     self.assertResourceCalled('Execute',
@@ -1422,6 +1425,7 @@ class TestNamenode(RMFTestCase):
                        classname = "NameNode",
                        command = "pre_upgrade_restart",
                        config_dict = json_content,
+                       config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
                        call_mocks = [(0, None, None), (0, None), (0, None)],

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_nfsgateway.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_nfsgateway.py b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_nfsgateway.py
index de425cd..773d3fe 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_nfsgateway.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_nfsgateway.py
@@ -34,6 +34,8 @@ class TestNFSGateway(RMFTestCase):
   STACK_VERSION = "2.0.6"
   UPGRADE_STACK_VERSION = "2.2"
 
+  CONFIG_OVERRIDES = {"serviceName":"HDFS", "role":"NFS_GATEWAY"}
+
   def test_configure_default(self):
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/nfsgateway.py",
                        classname = "NFSGateway",
@@ -291,6 +293,7 @@ class TestNFSGateway(RMFTestCase):
                        classname = "NFSGateway",
                        command = "pre_upgrade_restart",
                        config_dict = json_content,
+                       config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
                        call_mocks = [(0, None, ''), (0, None), (0, None), (0, None)])

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hcat_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hcat_client.py b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hcat_client.py
index ff7e728..457fccf 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hcat_client.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hcat_client.py
@@ -25,6 +25,8 @@ class TestHcatClient(RMFTestCase):
   COMMON_SERVICES_PACKAGE_DIR = "HIVE/0.12.0.2.0/package"
   STACK_VERSION = "2.0.6"
 
+  CONFIG_OVERRIDES = {"serviceName":"HIVE", "role":"HCAT"}
+
   def test_configure_default(self):
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hcat_client.py",
                        classname = "HCatClient",
@@ -117,6 +119,7 @@ class TestHcatClient(RMFTestCase):
       classname = "HCatClient",
       command = "pre_upgrade_restart",
       config_dict = json_content,
+      config_overrides = self.CONFIG_OVERRIDES,
       stack_version = self.STACK_VERSION,
       target = RMFTestCase.TARGET_COMMON_SERVICES,
       call_mocks = [(0, None, ''), (0, None, ''), (0, None, ''), (0, None, '')],

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_client.py b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_client.py
index 6afc298..3bc597e 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_client.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_client.py
@@ -25,11 +25,14 @@ class TestHiveClient(RMFTestCase):
   COMMON_SERVICES_PACKAGE_DIR = "HIVE/0.12.0.2.0/package"
   STACK_VERSION = "2.0.6"
 
+  CONFIG_OVERRIDES = { "serviceName" : "HIVE", "role" : "HIVE_CLIENT" }
+
   def test_configure_default(self):
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_client.py",
                        classname = "HiveClient",
                        command = "configure",
                        config_file="default_client.json",
+                       config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES
     )
@@ -110,6 +113,7 @@ class TestHiveClient(RMFTestCase):
                        classname = "HiveClient",
                        command = "configure",
                        config_file="secured_client.json",
+                       config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES
     )
@@ -198,6 +202,7 @@ class TestHiveClient(RMFTestCase):
                        classname = "HiveClient",
                        command = "pre_upgrade_restart",
                        config_dict = json_content,
+                       config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES)
     self.assertResourceCalled('Execute',
@@ -219,6 +224,7 @@ class TestHiveClient(RMFTestCase):
                        classname = "HiveClient",
                        command = "pre_upgrade_restart",
                        config_dict = json_content,
+                       config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
                        call_mocks = [(0, None, ''), (0, None, ''), (0, None, ''), (0, None, '')],

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_webhcat_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_webhcat_server.py b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_webhcat_server.py
index 03dd391..a6a4fa0 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_webhcat_server.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_webhcat_server.py
@@ -29,6 +29,8 @@ class TestWebHCatServer(RMFTestCase):
   COMMON_SERVICES_PACKAGE_DIR = "HIVE/0.12.0.2.0/package"
   STACK_VERSION = "2.0.6"
 
+  CONFIG_OVERRIDES = {"serviceName":"HIVE", "role":"WEBHCAT_SERVER"}
+
   def test_configure_default(self):
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/webhcat_server.py",
                        classname = "WebHCatServer",
@@ -286,6 +288,7 @@ class TestWebHCatServer(RMFTestCase):
                        classname = "WebHCatServer",
                        command = "pre_upgrade_restart",
                        config_dict = json_content,
+                       config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES)
     self.assertResourceCalled('Execute',
@@ -308,6 +311,7 @@ class TestWebHCatServer(RMFTestCase):
                        classname = "WebHCatServer",
                        command = "pre_upgrade_restart",
                        config_dict = json_content,
+                       config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
                        call_mocks = [(0, None, ''), (0, None, '')],

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_client.py b/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_client.py
index f7e4410..31d54ae 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_client.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_client.py
@@ -28,6 +28,8 @@ class TestOozieClient(RMFTestCase):
   COMMON_SERVICES_PACKAGE_DIR = "OOZIE/4.0.0.2.0/package"
   STACK_VERSION = "2.0.6"
 
+  CONFIG_OVERRIDES = {"serviceName":"OOZIE", "role":"OOZIE_CLIENT"}
+
   def test_configure_default(self):
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/oozie_client.py",
                        classname = "OozieClient",
@@ -243,6 +245,7 @@ class TestOozieClient(RMFTestCase):
                        classname = "OozieClient",
                        command = "pre_upgrade_restart",
                        config_dict = json_content,
+                       config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES)
     self.assertResourceCalled('Execute',
@@ -264,6 +267,7 @@ class TestOozieClient(RMFTestCase):
                        classname = "OozieClient",
                        command = "pre_upgrade_restart",
                        config_dict = json_content,
+                       config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
                        call_mocks = [(0, None, ''), (0, None)],

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_server.py b/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_server.py
index 5ef6ad9..17b8abf 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_server.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_server.py
@@ -39,6 +39,8 @@ class TestOozieServer(RMFTestCase):
   UPGRADE_STACK_VERSION = "2.2"
   DEFAULT_IMMUTABLE_PATHS = ['/apps/hive/warehouse', '/apps/falcon', '/mr-history/done', '/app-logs', '/tmp']
 
+  CONFIG_OVERRIDES = {"serviceName":"OOZIE", "role":"OOZIE_SERVER"}
+
   def setUp(self):
     self.maxDiff = None
 
@@ -1194,6 +1196,7 @@ class TestOozieServer(RMFTestCase):
 
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/oozie_server.py",
      classname = "OozieServer", command = "pre_upgrade_restart", config_file = "oozie-upgrade.json",
+     config_overrides = self.CONFIG_OVERRIDES,
      stack_version = self.UPGRADE_STACK_VERSION,
      target = RMFTestCase.TARGET_COMMON_SERVICES,
      call_mocks = [(0, prepare_war_stdout)])
@@ -1206,6 +1209,9 @@ class TestOozieServer(RMFTestCase):
     self.assertEqual(glob_mock.call_count,1)
     glob_mock.assert_called_with('/usr/hdp/2.2.1.0-2135/hadoop/lib/hadoop-lzo*.jar')
 
+    self.assertResourceCalled('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'oozie-client', u'2.2.1.0-2135'),
+      sudo = True )
+
     self.assertResourceCalled('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'oozie-server', u'2.2.1.0-2135'),
       sudo = True )
 
@@ -1250,6 +1256,7 @@ class TestOozieServer(RMFTestCase):
     mocks_dict = {}
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/oozie_server.py",
      classname = "OozieServer", command = "pre_upgrade_restart", config_dict = json_content,
+     config_overrides = self.CONFIG_OVERRIDES,
      stack_version = self.UPGRADE_STACK_VERSION,
      target = RMFTestCase.TARGET_COMMON_SERVICES,
      call_mocks = [(0, None, ''), (0, prepare_war_stdout)],
@@ -1266,6 +1273,8 @@ class TestOozieServer(RMFTestCase):
     self.assertResourceCalled('Link', '/etc/oozie/conf',
                               to = '/usr/hdp/current/oozie-client/conf',
     )
+
+    self.assertResourceCalled('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'oozie-client', '2.3.0.0-1234'), sudo = True)
     self.assertResourceCalled('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'oozie-server', '2.3.0.0-1234'), sudo = True)
 
     self.assertResourceCalled('Directory', '/usr/hdp/current/oozie-server/libext', mode = 0777)
@@ -1313,6 +1322,7 @@ class TestOozieServer(RMFTestCase):
     self.assertEqual(isfile_mock.call_count,1)
     isfile_mock.assert_called_with('/usr/share/HDP-oozie/ext-2.2.zip')
 
+    self.assertResourceCalled('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'oozie-client', u'2.2.0.0-0000'), sudo = True)
     self.assertResourceCalled('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'oozie-server', u'2.2.0.0-0000'), sudo = True)
 
     self.assertResourceCalled('Directory', '/usr/hdp/current/oozie-server/libext',mode = 0777)
@@ -1507,6 +1517,7 @@ class TestOozieServer(RMFTestCase):
     glob_mock.assert_called_with('/usr/hdp/2.3.0.0-1234/hadoop/lib/hadoop-lzo*.jar')
 
     self.assertResourceCalled('Link', '/etc/oozie/conf', to = '/usr/hdp/current/oozie-client/conf')
+    self.assertResourceCalled('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'oozie-client', '2.3.0.0-1234'), sudo = True)
     self.assertResourceCalled('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'oozie-server', '2.3.0.0-1234'), sudo = True)
 
     self.assertResourceCalled('Directory', '/usr/hdp/current/oozie-server/libext', mode = 0777)
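
The pre_upgrade_restart hunks above now expect one hdp-select invocation per package resolved for OOZIE_SERVER (oozie-client first, then oozie-server), which matches that component's STANDARD list in the JSON earlier in this patch. A standalone sketch of that per-package loop, using subprocess as a stand-in for the Execute resource asserted in the tests (the function name and dry_run flag are illustrative assumptions):

    import subprocess

    def select_packages(packages, version, dry_run=True):
        """Run 'hdp-select set <package> <version>' once per resolved package."""
        for package in packages:
            cmd = ["ambari-python-wrap", "/usr/bin/hdp-select", "set", package, version]
            if dry_run:
                print("would run (sudo):", " ".join(cmd))
            else:
                subprocess.check_call(["sudo"] + cmd)

    # Mirrors the two Execute asserts above: oozie-client first, then oozie-server.
    select_packages(["oozie-client", "oozie-server"], "2.3.0.0-1234")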

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/test/python/stacks/2.0.6/PIG/test_pig_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/PIG/test_pig_client.py b/ambari-server/src/test/python/stacks/2.0.6/PIG/test_pig_client.py
index 804abe7..63076f9 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/PIG/test_pig_client.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/PIG/test_pig_client.py
@@ -25,6 +25,8 @@ class TestPigClient(RMFTestCase):
   COMMON_SERVICES_PACKAGE_DIR = "PIG/0.12.0.2.0/package"
   STACK_VERSION = "2.0.6"
 
+  CONFIG_OVERRIDES = {"serviceName":"PIG", "role":"PIG"}
+
   def test_configure_default(self):
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/pig_client.py",
                        classname = "PigClient",
@@ -143,6 +145,7 @@ class TestPigClient(RMFTestCase):
                        classname = "PigClient",
                        command = "pre_upgrade_restart",
                        config_dict = json_content,
+                       config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES)
     self.assertResourceCalled('Execute',
@@ -161,6 +164,7 @@ class TestPigClient(RMFTestCase):
                        classname = "PigClient",
                        command = "pre_upgrade_restart",
                        config_dict = json_content,
+                       config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
                        call_mocks = [(0, None, ''), (0, None, '')],

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/test/python/stacks/2.0.6/SQOOP/test_sqoop.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/SQOOP/test_sqoop.py b/ambari-server/src/test/python/stacks/2.0.6/SQOOP/test_sqoop.py
index 87b8fec..4622ae3 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/SQOOP/test_sqoop.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/SQOOP/test_sqoop.py
@@ -25,6 +25,8 @@ class TestSqoop(RMFTestCase):
   COMMON_SERVICES_PACKAGE_DIR = "SQOOP/1.4.4.2.0/package"
   STACK_VERSION = "2.0.6"
 
+  CONFIG_OVERRIDES = {"serviceName":"SQOOP", "role":"SQOOP"}
+
   def test_configure_default(self):
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/sqoop_client.py",
                        classname = "SqoopClient",
@@ -136,6 +138,7 @@ class TestSqoop(RMFTestCase):
                        classname = "SqoopClient",
                        command = "pre_upgrade_restart",
                        config_dict = json_content,
+                       config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
                        call_mocks = [(0, None, ''), (0, None)],

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py
index ea5b468..0cfc6df 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py
@@ -34,7 +34,9 @@ class TestHistoryServer(RMFTestCase):
   COMMON_SERVICES_PACKAGE_DIR = "YARN/2.1.0.2.0/package"
   STACK_VERSION = "2.0.6"
   DEFAULT_IMMUTABLE_PATHS = ['/apps/hive/warehouse', '/apps/falcon', '/mr-history/done', '/app-logs', '/tmp']
-  
+
+  CONFIG_OVERRIDES = {"serviceName":"MAPREDUCE2", "role":"HISTORYSERVER"}
+
   def test_configure_default(self):
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/historyserver.py",
                        classname="HistoryServer",
@@ -767,6 +769,7 @@ class TestHistoryServer(RMFTestCase):
                        classname = "HistoryServer",
                        command = "pre_upgrade_restart",
                        config_dict = json_content,
+                       config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
                        call_mocks = [(0, None, ''), (0, None, None), (0, None, None), (0, None, None), (0, None, None)],

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/test/python/stacks/2.0.6/YARN/test_mapreduce2_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_mapreduce2_client.py b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_mapreduce2_client.py
index 7e06969..5898355 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_mapreduce2_client.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_mapreduce2_client.py
@@ -34,6 +34,8 @@ class TestMapReduce2Client(RMFTestCase):
   COMMON_SERVICES_PACKAGE_DIR = "YARN/2.1.0.2.0/package"
   STACK_VERSION = "2.0.6"
 
+  CONFIG_OVERRIDES = {"serviceName":"MAPREDUCE2", "role":"MAPREDUCE2_CLIENT"}
+
   def test_configure_default(self):
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/mapreduce2_client.py",
                        classname = "MapReduce2Client",
@@ -390,6 +392,7 @@ class TestMapReduce2Client(RMFTestCase):
                    classname = "MapReduce2Client",
                    command = "restart",
                    config_file="client-upgrade.json",
+                   config_overrides = self.CONFIG_OVERRIDES,
                    stack_version = self.STACK_VERSION,
                    target = RMFTestCase.TARGET_COMMON_SERVICES
     )
@@ -410,6 +413,7 @@ class TestMapReduce2Client(RMFTestCase):
                        classname = "MapReduce2Client",
                        command = "pre_upgrade_restart",
                        config_dict = json_content,
+                       config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
                        call_mocks = [(0, None, ''), (0, None)],
@@ -439,6 +443,7 @@ class TestMapReduce2Client(RMFTestCase):
                        classname = "MapReduce2Client",
                        command = "stack_upgrade_save_new_config",
                        config_dict = json_content,
+                       config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
                        call_mocks = [(0, None, ''), (0, None)],

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/test/python/stacks/2.0.6/YARN/test_nodemanager.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_nodemanager.py b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_nodemanager.py
index ed8fb27..d132e73 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_nodemanager.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_nodemanager.py
@@ -34,6 +34,8 @@ class TestNodeManager(RMFTestCase):
   COMMON_SERVICES_PACKAGE_DIR = "YARN/2.1.0.2.0/package"
   STACK_VERSION = "2.0.6"
 
+  CONFIG_OVERRIDES = {"serviceName":"YARN", "role":"NODEMANAGER"}
+
   def test_configure_default(self):
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/nodemanager.py",
                        classname="Nodemanager",
@@ -577,6 +579,7 @@ class TestNodeManager(RMFTestCase):
       classname = "Nodemanager",
       command = "post_upgrade_restart",
       config_file = "default.json",
+      config_overrides = self.CONFIG_OVERRIDES,
       stack_version = self.STACK_VERSION,
       target = RMFTestCase.TARGET_COMMON_SERVICES,
       checked_call_mocks = [(0, process_output)],
@@ -605,6 +608,7 @@ class TestNodeManager(RMFTestCase):
                          classname="Nodemanager",
                          command = "post_upgrade_restart",
                          config_file="default.json",
+                         config_overrides = self.CONFIG_OVERRIDES,
                          stack_version = self.STACK_VERSION,
                          target = RMFTestCase.TARGET_COMMON_SERVICES,
                          call_mocks = [(0, process_output)],
@@ -628,6 +632,7 @@ class TestNodeManager(RMFTestCase):
                          classname="Nodemanager",
                          command = "post_upgrade_restart",
                          config_file="default.json",
+                         config_overrides = self.CONFIG_OVERRIDES,
                          stack_version = self.STACK_VERSION,
                          target = RMFTestCase.TARGET_COMMON_SERVICES,
                          call_mocks = [(999, process_output)],
@@ -651,6 +656,7 @@ class TestNodeManager(RMFTestCase):
                        classname = "Nodemanager",
                        command = "pre_upgrade_restart",
                        config_dict = json_content,
+                       config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
                        call_mocks = [(0, None, ''), (0, None)],

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/test/python/stacks/2.0.6/YARN/test_resourcemanager.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_resourcemanager.py b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_resourcemanager.py
index e8b5f78..82d3a1c 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_resourcemanager.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_resourcemanager.py
@@ -37,6 +37,8 @@ class TestResourceManager(RMFTestCase):
   COMMON_SERVICES_PACKAGE_DIR = "YARN/2.1.0.2.0/package"
   STACK_VERSION = "2.0.6"
 
+  CONFIG_OVERRIDES = {"serviceName":"YARN", "role":"RESOURCEMANAGER"}
+
   def test_configure_default(self):
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/resourcemanager.py",
                        classname="Resourcemanager",
@@ -549,6 +551,7 @@ class TestResourceManager(RMFTestCase):
                        classname = "Resourcemanager",
                        command = "pre_upgrade_restart",
                        config_dict = json_content,
+                       config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
                        call_mocks = [(0, None, ''), (0, None)],

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/test/python/stacks/2.0.6/YARN/test_yarn_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_yarn_client.py b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_yarn_client.py
index f71c93a..09a6278 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_yarn_client.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_yarn_client.py
@@ -34,6 +34,8 @@ class TestYarnClient(RMFTestCase):
   COMMON_SERVICES_PACKAGE_DIR = "YARN/2.1.0.2.0/package"
   STACK_VERSION = "2.0.6"
 
+  CONFIG_OVERRIDES = {"serviceName":"YARN", "role":"YARN_CLIENT"}
+
   def test_configure_default(self):
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/yarn_client.py",
                        classname = "YarnClient",
@@ -556,6 +558,7 @@ class TestYarnClient(RMFTestCase):
                    classname = "YarnClient",
                    command = "restart",
                    config_file="client-upgrade.json",
+                   config_overrides = self.CONFIG_OVERRIDES,
                    stack_version = self.STACK_VERSION,
                    target = RMFTestCase.TARGET_COMMON_SERVICES
     )
@@ -577,6 +580,7 @@ class TestYarnClient(RMFTestCase):
                        classname = "YarnClient",
                        command = "pre_upgrade_restart",
                        config_dict = json_content,
+                       config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
                        call_mocks = [(0, None, ''), (0, None)],

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/test/python/stacks/2.0.6/ZOOKEEPER/test_zookeeper_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/ZOOKEEPER/test_zookeeper_client.py b/ambari-server/src/test/python/stacks/2.0.6/ZOOKEEPER/test_zookeeper_client.py
index e4c6fbd..f074036 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/ZOOKEEPER/test_zookeeper_client.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/ZOOKEEPER/test_zookeeper_client.py
@@ -27,6 +27,8 @@ class TestZookeeperClient(RMFTestCase):
   COMMON_SERVICES_PACKAGE_DIR = "ZOOKEEPER/3.4.5/package"
   STACK_VERSION = "2.0.6"
 
+  CONFIG_OVERRIDES = {"serviceName":"ZOOKEEPER", "role":"ZOOKEEPER_CLIENT"}
+
   def test_configure_default(self):
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/zookeeper_client.py",
                        classname = "ZookeeperClient",
@@ -170,6 +172,7 @@ class TestZookeeperClient(RMFTestCase):
                        classname = "ZookeeperClient",
                        command = "pre_upgrade_restart",
                        config_dict = json_content,
+                       config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES)
     self.assertResourceCalled('Execute',
@@ -191,6 +194,7 @@ class TestZookeeperClient(RMFTestCase):
                        classname = "ZookeeperClient",
                        command = "pre_upgrade_restart",
                        config_dict = json_content,
+                       config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
                        call_mocks = [(0, None, ''), (0, None)],

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/test/python/stacks/2.0.6/ZOOKEEPER/test_zookeeper_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/ZOOKEEPER/test_zookeeper_server.py b/ambari-server/src/test/python/stacks/2.0.6/ZOOKEEPER/test_zookeeper_server.py
index 6d38a67..bc58e56 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/ZOOKEEPER/test_zookeeper_server.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/ZOOKEEPER/test_zookeeper_server.py
@@ -28,6 +28,8 @@ class TestZookeeperServer(RMFTestCase):
   COMMON_SERVICES_PACKAGE_DIR = "ZOOKEEPER/3.4.5/package"
   STACK_VERSION = "2.0.6"
 
+  CONFIG_OVERRIDES = {"serviceName":"ZOOKEEPER", "role":"ZOOKEEPER_SERVER"}
+
   def test_configure_default(self):
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/zookeeper_server.py",
                        classname = "ZookeeperServer",
@@ -257,6 +259,7 @@ class TestZookeeperServer(RMFTestCase):
                        classname = "ZookeeperServer",
                        command = "pre_upgrade_restart",
                        config_dict = json_content,
+                       config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES)
     self.assertResourceCalled('Execute',
@@ -278,6 +281,7 @@ class TestZookeeperServer(RMFTestCase):
                        classname = "ZookeeperServer",
                        command = "pre_upgrade_restart",
                        config_dict = json_content,
+                       config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
                        call_mocks = [(0, None, ''), (0, None)],
@@ -313,6 +317,7 @@ class TestZookeeperServer(RMFTestCase):
                        classname = "ZookeeperServer",
                        command = "post_upgrade_restart",
                        config_dict = json_content,
+                       config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
                        call_mocks = [