Posted to commits@ambari.apache.org by jl...@apache.org on 2017/10/09 06:01:19 UTC

[21/50] [abbrv] ambari git commit: AMBARI-22123 - Adding Components On Patched Clusters Can Result In Symlink Issues With conf Directories (jonathanhurley)

AMBARI-22123 - Adding Components On Patched Clusters Can Result In Symlink Issues With conf Directories (jonathanhurley)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/158bd656
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/158bd656
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/158bd656

Branch: refs/heads/branch-feature-AMBARI-14714
Commit: 158bd656dfe973d7aa487ae90735b315778c5463
Parents: 3e6aa87
Author: Jonathan Hurley <jh...@hortonworks.com>
Authored: Wed Oct 4 13:52:48 2017 -0400
Committer: Jonathan Hurley <jh...@hortonworks.com>
Committed: Wed Oct 4 13:52:48 2017 -0400

----------------------------------------------------------------------
 .../libraries/functions/conf_select.py          | 285 +++++++------------
 .../custom_actions/scripts/install_packages.py  |   6 +-
 .../scripts/shared_initialization.py            |   6 +-
 .../src/test/python/TestAmbariServer.py         |   4 +-
 ambari-server/src/test/python/TestMpacks.py     |  12 +-
 .../hooks/after-INSTALL/test_after_install.py   |  50 +---
 .../stacks/2.2/common/test_conf_select.py       |  13 +-
 .../HIVE/package/scripts/hive_client.py         |   2 -
 .../HIVE/package/scripts/hive_metastore.py      |   1 -
 .../HIVE/package/scripts/hive_server.py         |   2 +-
 .../package/scripts/hive_server_interactive.py  |   1 -
 .../HIVE/package/scripts/webhcat_server.py      |   2 -
 .../scripts/application_timeline_server.py      |   1 -
 .../YARN/package/scripts/historyserver.py       |   1 -
 .../YARN/package/scripts/mapreduce2_client.py   |   2 -
 .../YARN/package/scripts/nodemanager.py         |   1 -
 .../YARN/package/scripts/resourcemanager.py     |   1 -
 .../YARN/package/scripts/yarn_client.py         |   1 -
 18 files changed, 139 insertions(+), 252 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/158bd656/ambari-common/src/main/python/resource_management/libraries/functions/conf_select.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/conf_select.py b/ambari-common/src/main/python/resource_management/libraries/functions/conf_select.py
index f330f39..c89e767 100644
--- a/ambari-common/src/main/python/resource_management/libraries/functions/conf_select.py
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/conf_select.py
@@ -26,7 +26,6 @@ import subprocess
 import ambari_simplejson as json
 
 # Local Imports
-import stack_select
 from resource_management.core import shell
 from resource_management.libraries.functions.format import format
 from resource_management.libraries.script.script import Script
@@ -43,9 +42,6 @@ from resource_management.core.shell import as_sudo
 from resource_management.libraries.functions.stack_features import check_stack_feature
 from resource_management.libraries.functions import StackFeature
 
-DIRECTORY_TYPE_BACKUP = "backup"
-DIRECTORY_TYPE_CURRENT = "current"
-
 def _get_cmd(command, package, version):
   conf_selector_path = stack_tools.get_stack_tool_path(stack_tools.CONF_SELECTOR_NAME)
   return ('ambari-python-wrap', conf_selector_path, command, '--package', package, '--stack-version', version, '--conf-version', '0')
@@ -98,11 +94,17 @@ def create(stack_name, package, version, dry_run = False):
   :param dry_run: False to create the versioned config directory, True to only return what would be created
   :return List of directories created
   """
-  Logger.info("Checking if need to create versioned conf dir /etc/{0}/{1}/0".format(package, version))
   if not _valid(stack_name, package, version):
-    Logger.info("Will not create it since parameters are not valid.")
+    Logger.info("Unable to create versioned configuration directories since the parameters supplied do not support it")
     return []
 
+  # clarify the logging of what we're doing ...
+  if dry_run:
+    Logger.info(
+      "Checking to see which directories will be created for {0} on version {1}".format(package, version))
+  else:
+    Logger.info("Creating /etc/{0}/{1}/0 if it does not exist".format(package, version))
+
   command = "dry-run-create" if dry_run else "create-conf-dir"
 
   code, stdout, stderr = shell.call(_get_cmd(command, package, version), logoutput=False, quiet=False, sudo=True, stderr = subprocess.PIPE)
@@ -129,17 +131,13 @@ def create(stack_name, package, version, dry_run = False):
   return created_directories
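
As the logging above suggests, create() doubles as a probe: with dry_run=True it only
reports which versioned config directories would be created. A minimal usage sketch
(the stack name and version strings here are illustrative):

    from resource_management.libraries.functions import conf_select

    # Ask what would be created, without touching the filesystem.
    would_create = conf_select.create("HDP", "hadoop", "2.3.0.0-1234", dry_run=True)

    # Only do real work when something is actually missing.
    if would_create:
        conf_select.create("HDP", "hadoop", "2.3.0.0-1234")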
 
 
-def select(stack_name, package, version, try_create=True, ignore_errors=False):
+def select(stack_name, package, version, ignore_errors=False):
   """
-  Selects a config version for the specified package. If this detects that
-  the stack supports configuration versioning but /etc/<component>/conf is a
-  directory, then it will attempt to bootstrap the conf.backup directory and change
-  /etc/<component>/conf into a symlink.
+  Selects a config version for the specified package.
 
   :param stack_name: the name of the stack
   :param package: the name of the package, as-used by <conf-selector-tool>
   :param version: the version number to create
-  :param try_create: optional argument to attempt to create the directory before setting it
   :param ignore_errors: optional argument to ignore any error and simply log a warning
   """
   try:
@@ -147,67 +145,8 @@ def select(stack_name, package, version, try_create=True, ignore_errors=False):
     if not _valid(stack_name, package, version):
       return
 
-    if try_create:
-      create(stack_name, package, version)
-
+    create(stack_name, package, version)
     shell.checked_call(_get_cmd("set-conf-dir", package, version), logoutput=False, quiet=False, sudo=True)
-
-    # for consistency sake, we must ensure that the /etc/<component>/conf symlink exists and
-    # points to <stack-root>/current/<component>/conf - this is because some people still prefer to
-    # use /etc/<component>/conf even though <stack-root> is the "future"
-    package_dirs = get_package_dirs()
-    if package in package_dirs:
-      Logger.info("Ensuring that {0} has the correct symlink structure".format(package))
-
-      directory_list = package_dirs[package]
-      for directory_structure in directory_list:
-        conf_dir = directory_structure["conf_dir"]
-        current_dir = directory_structure["current_dir"]
-
-        # if /etc/<component>/conf is missing or is not a symlink
-        if not os.path.islink(conf_dir):
-          # if /etc/<component>/conf is not a link and it exists, convert it to a symlink
-          if os.path.exists(conf_dir):
-            parent_directory = os.path.dirname(conf_dir)
-            conf_backup_dir = os.path.join(parent_directory, "conf.backup")
-
-            # create conf.backup and copy files to it (if it doesn't exist)
-            Execute(("cp", "-R", "-p", conf_dir, conf_backup_dir),
-              not_if = format("test -e {conf_backup_dir}"), sudo = True)
-
-            # delete the old /etc/<component>/conf directory and link to the backup
-            Directory(conf_dir, action="delete")
-            Link(conf_dir, to = conf_backup_dir)
-          else:
-            # missing entirely
-            # /etc/<component>/conf -> <stack-root>/current/<component>/conf
-            if package in ["atlas", ]:
-              #HACK for Atlas
-              '''
-              In the case of Atlas, the Hive RPM installs /usr/$stack/$version/atlas with some partial packages that
-              contain Hive hooks, while the Atlas RPM is responsible for installing the full content.
-
-              If the user does not have Atlas currently installed on their stack, then /usr/$stack/current/atlas-client
-              will be a broken symlink, and we should not create the
-              symlink /etc/atlas/conf -> /usr/$stack/current/atlas-client/conf .
-              If we mistakenly create this symlink, then when the user performs an EU/RU and then adds Atlas service
-              then the Atlas RPM will not be able to copy its artifacts into /etc/atlas/conf directory and therefore
-              prevent Ambari from by copying those unmanaged contents into /etc/atlas/$version/0
-              '''
-              component_list = default("/localComponents", [])
-              if "ATLAS_SERVER" in component_list or "ATLAS_CLIENT" in component_list:
-                Logger.info("Atlas is installed on this host.")
-                parent_dir = os.path.dirname(current_dir)
-                if os.path.exists(parent_dir):
-                  Link(conf_dir, to=current_dir)
-                else:
-                  Logger.info("Will not create symlink from {0} to {1} because the destination's parent dir does not exist.".format(conf_dir, current_dir))
-              else:
-                Logger.info("Will not create symlink from {0} to {1} because Atlas is not installed on this host.".format(conf_dir, current_dir))
-            else:
-              # Normal path for other packages
-              Link(conf_dir, to=current_dir)
-
   except Exception, exception:
     if ignore_errors is True:
       Logger.warning("Could not select the directory for package {0}. Error: {1}".format(package,
@@ -242,145 +181,117 @@ def get_hadoop_conf_dir():
   return hadoop_conf_dir
 
 
-def convert_conf_directories_to_symlinks(package, version, dirs, skip_existing_links=True,
-    link_to=DIRECTORY_TYPE_CURRENT):
+def convert_conf_directories_to_symlinks(package, version, dirs):
   """
-  Assumes HDP 2.3+, moves around directories and creates the conf symlink for the given package.
-  If the package does not exist, then no work is performed.
+  Reverses the symlinks created by the package installer and invokes the conf-select tool to
+  create versioned configuration directories for the given package. If the package does not exist,
+  then no work is performed.
 
-  - Creates a /etc/<component>/conf.backup directory
-  - Copies all configs from /etc/<component>/conf to conf.backup
-  - Removes /etc/<component>/conf
   - Creates /etc/<component>/<version>/0 via <conf-selector-tool>
+  - Creates a /etc/<component>/conf.backup directory, if needed
+  - Copies all configs from /etc/<component>/conf to conf.backup, if needed
+  - Removes /etc/<component>/conf, if needed
   - <stack-root>/current/<component>-client/conf -> /etc/<component>/<version>/0 via <conf-selector-tool>
-  - Links /etc/<component>/conf to <something> depending on function paramter
-  -- /etc/<component>/conf -> <stack-root>/current/[component]-client/conf (usually)
-  -- /etc/<component>/conf -> /etc/<component>/conf.backup (only when supporting < HDP 2.3)
+  - Links /etc/<component>/conf -> <stack-root>/current/[component]-client/conf
 
   :param package: the package to create symlinks for (zookeeper, falcon, etc)
   :param version: the version number to use with <conf-selector-tool> (2.3.0.0-1234)
   :param dirs: the directories associated with the package (from get_package_dirs())
-  :param skip_existing_links: True to not do any work if already a symlink
-  :param link_to: link to "current" or "backup"
   """
-  # lack of enums makes this possible - we need to know what to link to
-  if link_to not in [DIRECTORY_TYPE_CURRENT, DIRECTORY_TYPE_BACKUP]:
-    raise Fail("Unsupported 'link_to' argument. Could not link package {0}".format(package))
-
+  # if the conf_dir doesn't exist, then that indicates that the package's service is not installed
+  # on this host and nothing should be done with conf symlinks
   stack_name = Script.get_stack_name()
-  bad_dirs = []
-  for dir_def in dirs:
-    if not os.path.exists(dir_def['conf_dir']):
-      bad_dirs.append(dir_def['conf_dir'])
-
-  if len(bad_dirs) > 0:
-    Logger.info("Skipping {0} as it does not exist.".format(",".join(bad_dirs)))
-    return
-
-  # existing links should be skipped since we assume there's no work to do
-  # they should be checked against the correct target though
-  if skip_existing_links:
-    bad_dirs = []
-    for dir_def in dirs:
-      # check if conf is a link already
-      old_conf = dir_def['conf_dir']
-      if os.path.islink(old_conf):
-        # it's already a link; make sure it's a link to where we want it
-        if link_to == DIRECTORY_TYPE_BACKUP:
-          target_conf_dir = _get_backup_conf_directory(old_conf)
-        else:
-          target_conf_dir = dir_def['current_dir']
-
-        # the link isn't to the right spot; re-link it
-        if os.readlink(old_conf) != target_conf_dir:
-          Logger.info("Re-linking symlink {0} to {1}".format(old_conf, target_conf_dir))
-
-          Link(old_conf, action = "delete")
-          Link(old_conf, to = target_conf_dir)
-        else:
-          Logger.info("{0} is already linked to {1}".format(old_conf, os.path.realpath(old_conf)))
-
-        bad_dirs.append(old_conf)
+  for directory_struct in dirs:
+    if not os.path.exists(directory_struct['conf_dir']):
+      Logger.info("Skipping the conf-select tool on {0} since {1} does not exist.".format(
+        package, directory_struct['conf_dir']))
 
-  if len(bad_dirs) > 0:
-    return
-
-  # make backup dir and copy everything in case configure() was called after install()
-  for dir_def in dirs:
-    old_conf = dir_def['conf_dir']
-    backup_dir = _get_backup_conf_directory(old_conf)
-    Logger.info("Backing up {0} to {1} if destination doesn't exist already.".format(old_conf, backup_dir))
-    Execute(("cp", "-R", "-p", unicode(old_conf), unicode(backup_dir)),
-      not_if = format("test -e {backup_dir}"), sudo = True)
-
-  # we're already in the HDP stack
-  # Create the versioned /etc/[component]/[version]/0 folder.
-  # The component must be installed on the host.
-  versioned_confs = create(stack_name, package, version, dry_run = True)
+      return
 
-  Logger.info("Package {0} will have new conf directories: {1}".format(package, ", ".join(versioned_confs)))
+  # determine which directories would be created, if any are needed
+  dry_run_directory = create(stack_name, package, version, dry_run = True)
 
   need_dirs = []
-  for d in versioned_confs:
+  for d in dry_run_directory:
     if not os.path.exists(d):
       need_dirs.append(d)
 
+  # log that we'll actually be creating some directories soon
   if len(need_dirs) > 0:
-    create(stack_name, package, version)
+    Logger.info("Package {0} will have the following new configuration directories created: {1}".format(
+      package, ", ".join(dry_run_directory)))
 
-    # find the matching definition and back it up (not the most efficient way) ONLY if there is more than one directory
-    if len(dirs) > 1:
-      for need_dir in need_dirs:
-        for dir_def in dirs:
-          if 'prefix' in dir_def and need_dir.startswith(dir_def['prefix']):
-            old_conf = dir_def['conf_dir']
-            versioned_conf = need_dir
-            Execute(as_sudo(["cp", "-R", "-p", os.path.join(old_conf, "*"), versioned_conf], auto_escape=False),
-              only_if = format("ls -d {old_conf}/*"))
-    elif 1 == len(dirs) and 1 == len(need_dirs):
-      old_conf = dirs[0]['conf_dir']
-      versioned_conf = need_dirs[0]
-      Execute(as_sudo(["cp", "-R", "-p", os.path.join(old_conf, "*"), versioned_conf], auto_escape=False),
-        only_if = format("ls -d {old_conf}/*"))
+  # Create the versioned /etc/[component]/[version]/0 folder (using create-conf-dir) and then
+  # set it for the installed component:
+  # - Creates /etc/<component>/<version>/0
+  # - Links <stack-root>/<version>/<component>/conf -> /etc/<component>/<version>/0
+  select(stack_name, package, version, ignore_errors = True)
 
+  # check each conf directory to see whether it's already a symlink and points to the right spot
+  for directory_struct in dirs:
+    try:
+      # check if conf is a link already
+      old_conf = directory_struct['conf_dir']
+      current_dir = directory_struct['current_dir']
+      if os.path.islink(old_conf):
+        # it's already a link; make sure it's a link to where we want it
+        if os.readlink(old_conf) != current_dir:
+          # the link isn't to the right spot; re-link it
+          Logger.info("Re-linking symlink {0} to {1}".format(old_conf, current_dir))
+          Link(old_conf, action = "delete")
+          Link(old_conf, to = current_dir)
+        else:
+          Logger.info("{0} is already linked to {1}".format(old_conf, current_dir))
+      elif os.path.isdir(old_conf):
+        # the /etc/<component>/conf directory is not a link, so turn it into one
+        Logger.info("{0} is a directory - it must be converted into a symlink".format(old_conf))
 
-  # <stack-root>/current/[component] is already set to to the correct version, e.g., <stack-root>/[version]/[component]
+        backup_dir = _get_backup_conf_directory(old_conf)
+        Logger.info("Backing up {0} to {1} if destination doesn't exist already.".format(old_conf, backup_dir))
+        Execute(("cp", "-R", "-p", old_conf, backup_dir),
+          not_if = format("test -e {backup_dir}"), sudo = True)
 
-  select(stack_name, package, version, ignore_errors = True)
+        # delete the old /etc/<component>/conf directory now that it's been backed up
+        Directory(old_conf, action = "delete")
 
-  # Symlink /etc/[component]/conf to /etc/[component]/conf.backup
-  try:
-    # No more references to /etc/[component]/conf
-    for dir_def in dirs:
-      # E.g., /etc/[component]/conf
-      new_symlink = dir_def['conf_dir']
-
-      # Delete the existing directory/link so that linking will work
-      if not os.path.islink(new_symlink):
-        Directory(new_symlink, action = "delete")
+        # link /etc/[component]/conf -> <stack-root>/current/[component]-client/conf
+        Link(old_conf, to = current_dir)
       else:
-        Link(new_symlink, action = "delete")
-
-      old_conf = dir_def['conf_dir']
-      backup_dir = _get_backup_conf_directory(old_conf)
-      # link /etc/[component]/conf -> /etc/[component]/conf.backup
-      # or
-      # link /etc/[component]/conf -> <stack-root>/current/[component]-client/conf
-      if link_to == DIRECTORY_TYPE_BACKUP:
-        Link(new_symlink, to=backup_dir)
-      else:
-        Link(new_symlink, to=dir_def['current_dir'])
-
-        #HACK
+        # missing entirely
+        # /etc/<component>/conf -> <stack-root>/current/<component>/conf
         if package in ["atlas", ]:
-          Logger.info("Seeding the new conf symlink {0} from the old backup directory {1} in case any "
-                      "unmanaged artifacts are needed.".format(new_symlink, backup_dir))
-          # If /etc/[component]/conf.backup exists, then copy any artifacts not managed by Ambari to the new symlink target
-          # Be careful not to clobber any existing files.
-          Execute(as_sudo(["cp", "-R", "--no-clobber", os.path.join(backup_dir, "*"), new_symlink], auto_escape=False),
-                  only_if=format("test -e {new_symlink}"))
-  except Exception, e:
-    Logger.warning("Could not change symlink for package {0} to point to {1} directory. Error: {2}".format(package, link_to, e))
+          # HACK for Atlas
+          '''
+          In the case of Atlas, the Hive RPM installs /usr/$stack/$version/atlas with some partial packages that
+          contain Hive hooks, while the Atlas RPM is responsible for installing the full content.
+    
+          If the user does not have Atlas currently installed on their stack, then /usr/$stack/current/atlas-client
+          will be a broken symlink, and we should not create the
+          symlink /etc/atlas/conf -> /usr/$stack/current/atlas-client/conf.
+          If we mistakenly create this symlink and the user performs an EU/RU and then adds the Atlas
+          service, the Atlas RPM will not be able to copy its artifacts into the /etc/atlas/conf directory,
+          which prevents Ambari from copying those unmanaged contents into /etc/atlas/$version/0
+          '''
+          component_list = default("/localComponents", [])
+          if "ATLAS_SERVER" in component_list or "ATLAS_CLIENT" in component_list:
+            Logger.info("Atlas is installed on this host.")
+            parent_dir = os.path.dirname(current_dir)
+            if os.path.exists(parent_dir):
+              Link(old_conf, to = current_dir)
+            else:
+              Logger.info(
+                "Will not create symlink from {0} to {1} because the destination's parent dir does not exist.".format(
+                  old_conf, current_dir))
+          else:
+            Logger.info(
+              "Will not create symlink from {0} to {1} because Atlas is not installed on this host.".format(
+                old_conf, current_dir))
+        else:
+          # Normal path for other packages
+          Link(old_conf, to = current_dir)
+
+    except Exception, e:
+      Logger.warning("Could not change symlink for package {0} to point to current directory. Error: {1}".format(package, e))
 
 
 def _seed_new_configuration_directories(package, created_directories):
@@ -460,4 +371,4 @@ def _get_backup_conf_directory(old_conf):
   """
   old_parent = os.path.abspath(os.path.join(old_conf, os.pardir))
   backup_dir = os.path.join(old_parent, "conf.backup")
-  return backup_dir
+  return backup_dir
\ No newline at end of file
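
Taken together, the rewritten convert_conf_directories_to_symlinks() walks each conf_dir
and either re-points an existing symlink, backs up and replaces a real directory, or
creates a missing link. A standalone sketch of that decision tree, using plain os/shutil
calls in place of Ambari's sudo-backed Link/Directory/Execute resources (so the
ownership-preserving "cp -p" semantics are only approximated by shutil.copytree):

    import os
    import shutil

    def convert_to_symlink(conf_dir, current_dir):
        # Sketch only; Ambari performs these steps via resource declarations.
        if os.path.islink(conf_dir):
            # already a link; re-point it only if it targets the wrong spot
            if os.readlink(conf_dir) != current_dir:
                os.remove(conf_dir)
                os.symlink(current_dir, conf_dir)
        elif os.path.isdir(conf_dir):
            # a real directory: back it up once, delete it, then link it
            backup_dir = os.path.join(os.path.dirname(conf_dir), "conf.backup")
            if not os.path.exists(backup_dir):
                shutil.copytree(conf_dir, backup_dir)
            shutil.rmtree(conf_dir)
            os.symlink(current_dir, conf_dir)
        elif os.path.exists(os.path.dirname(current_dir)):
            # missing entirely; only link when the target's parent exists
            # (the extra Atlas install check is omitted here)
            os.symlink(current_dir, conf_dir)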

http://git-wip-us.apache.org/repos/asf/ambari/blob/158bd656/ambari-server/src/main/resources/custom_actions/scripts/install_packages.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/custom_actions/scripts/install_packages.py b/ambari-server/src/main/resources/custom_actions/scripts/install_packages.py
index dcf3544..c5e4ae7 100644
--- a/ambari-server/src/main/resources/custom_actions/scripts/install_packages.py
+++ b/ambari-server/src/main/resources/custom_actions/scripts/install_packages.py
@@ -146,10 +146,10 @@ class InstallPackages(Script):
 
     # if installing a version of HDP that needs some symlink love, then create them
     if is_package_install_successful and 'actual_version' in self.structured_output:
-      self._create_config_links_if_necessary(stack_id, self.structured_output['actual_version'])
+      self._relink_configurations_with_conf_select(stack_id, self.structured_output['actual_version'])
 
 
-  def _create_config_links_if_necessary(self, stack_id, stack_version):
+  def _relink_configurations_with_conf_select(self, stack_id, stack_version):
     """
     Sets up the required structure for /etc/<component>/conf symlinks and <stack-root>/current
     configuration symlinks IFF the current stack is < HDP 2.3+ and the new stack is >= HDP 2.3
@@ -177,7 +177,7 @@ class InstallPackages(Script):
       Link("/usr/bin/conf-select", to="/usr/bin/hdfconf-select")
 
     for package_name, directories in conf_select.get_package_dirs().iteritems():
-      conf_select.select(self.stack_name, package_name, stack_version, ignore_errors = True)
+      conf_select.convert_conf_directories_to_symlinks(package_name, stack_version, directories)
 
   def compute_actual_version(self):
     """

http://git-wip-us.apache.org/repos/asf/ambari/blob/158bd656/ambari-server/src/main/resources/stack-hooks/after-INSTALL/scripts/shared_initialization.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stack-hooks/after-INSTALL/scripts/shared_initialization.py b/ambari-server/src/main/resources/stack-hooks/after-INSTALL/scripts/shared_initialization.py
index 67c3ba8..1a4b074 100644
--- a/ambari-server/src/main/resources/stack-hooks/after-INSTALL/scripts/shared_initialization.py
+++ b/ambari-server/src/main/resources/stack-hooks/after-INSTALL/scripts/shared_initialization.py
@@ -116,7 +116,7 @@ def load_version(struct_out_file):
 
 def link_configs(struct_out_file):
   """
-  Links configs, only on a fresh install of HDP-2.3 and higher
+  Use the conf_select module to link configuration directories correctly.
   """
   import params
 
@@ -128,5 +128,5 @@ def link_configs(struct_out_file):
 
   # On parallel command execution this should be executed by a single process at a time.
   with FcntlBasedProcessLock(params.link_configs_lock_file, enabled = params.is_parallel_execution_enabled, skip_fcntl_failures = True):
-    for k, v in conf_select.get_package_dirs().iteritems():
-      conf_select.convert_conf_directories_to_symlinks(k, json_version, v)
\ No newline at end of file
+    for package_name, directories in conf_select.get_package_dirs().iteritems():
+      conf_select.convert_conf_directories_to_symlinks(package_name, json_version, directories)
\ No newline at end of file
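
The FcntlBasedProcessLock context manager above is what keeps parallel command execution
from racing on the symlink work; the same pattern in isolation (the lock file path is
illustrative, and the import path is assumed from Ambari's resource_management layout):

    from resource_management.libraries.functions.fcntl_based_process_lock import FcntlBasedProcessLock

    # Illustrative path; Ambari derives the real one from params.link_configs_lock_file.
    lock_file = "/var/lib/ambari-agent/tmp/link_configs_lock_file"

    # Only one process on the host enters the block at a time; with
    # skip_fcntl_failures=True, a failure to acquire the lock is tolerated
    # rather than failing the command outright.
    with FcntlBasedProcessLock(lock_file, enabled=True, skip_fcntl_failures=True):
        pass  # linking work goes here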

http://git-wip-us.apache.org/repos/asf/ambari/blob/158bd656/ambari-server/src/test/python/TestAmbariServer.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/TestAmbariServer.py b/ambari-server/src/test/python/TestAmbariServer.py
index 1c4ebaf..d064b00 100644
--- a/ambari-server/src/test/python/TestAmbariServer.py
+++ b/ambari-server/src/test/python/TestAmbariServer.py
@@ -3693,6 +3693,7 @@ class TestAmbariServer(TestCase):
   @patch("ambari_server.dbConfiguration_linux.LinuxDBMSConfig.ensure_jdbc_driver_installed")
   @patch("ambari_server.dbConfiguration_linux.get_YN_input")
   @patch("ambari_server.serverSetup.update_properties")
+  @patch("ambari_server.dbConfiguration.get_ambari_properties")
   @patch("ambari_server.dbConfiguration_linux.get_ambari_properties")
   @patch("ambari_server.dbConfiguration_linux.store_password_file")
   @patch("ambari_server.dbConfiguration_linux.run_os_command")
@@ -3724,7 +3725,7 @@ class TestAmbariServer(TestCase):
                  get_YN_input_mock, gvsi_mock, gvsi_1_mock,
                  read_password_mock, verify_setup_allowed_method, is_jdbc_user_changed_mock, check_postgre_up_mock,
                  configure_postgres_mock, run_os_command_1_mock,
-                 store_password_file_mock, get_ambari_properties_1_mock, update_properties_mock,
+                 store_password_file_mock, get_ambari_properties_1_mock, get_ambari_properties_2_mock, update_properties_mock,
                  get_YN_input_1_mock, ensure_jdbc_driver_installed_mock,
                  remove_file_mock, isfile_mock, exists_mock,
                  run_os_command_mock, get_pw_nam_mock):
@@ -3768,6 +3769,7 @@ class TestAmbariServer(TestCase):
     read_password_mock.return_value = "bigdata2"
     get_ambari_properties_mock.return_value = properties
     get_ambari_properties_1_mock.return_value = properties
+    get_ambari_properties_2_mock.return_value = properties
     store_password_file_mock.return_value = "encrypted_bigdata2"
     ensure_jdbc_driver_installed_mock.return_value = True
     check_postgre_up_mock.return_value = (PGConfig.PG_STATUS_RUNNING, 0, "", "")
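
Note on the test change: stacked @patch decorators are applied bottom-up, so their mocks
arrive as positional arguments in reverse decorator order; inserting the new
dbConfiguration patch mid-stack is why get_ambari_properties_2_mock slots in where it
does. A tiny self-contained illustration:

    from mock.mock import patch

    @patch("os.path.islink")   # outermost decorator -> last mock argument
    @patch("os.path.exists")   # innermost decorator -> first mock argument
    def demo(exists_mock, islink_mock):
        exists_mock.return_value = True
        islink_mock.return_value = False
        import os.path
        assert os.path.exists("/any") and not os.path.islink("/any")

    demo()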

http://git-wip-us.apache.org/repos/asf/ambari/blob/158bd656/ambari-server/src/test/python/TestMpacks.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/TestMpacks.py b/ambari-server/src/test/python/TestMpacks.py
index 98de76c..b290665 100644
--- a/ambari-server/src/test/python/TestMpacks.py
+++ b/ambari-server/src/test/python/TestMpacks.py
@@ -260,6 +260,7 @@ class TestMpacks(TestCase):
   @patch("os.path.exists")
   @patch("shutil.move")
   @patch("os.mkdir")
+  @patch("ambari_server.setupMpacks.read_ambari_user")
   @patch("ambari_server.setupMpacks.create_symlink")
   @patch("ambari_server.setupMpacks.get_ambari_version")
   @patch("ambari_server.setupMpacks.get_ambari_properties")
@@ -272,7 +273,7 @@ class TestMpacks(TestCase):
   @patch("ambari_server.setupMpacks.set_file_permissions")
   def test_install_stack_mpack(self, set_file_permissions_mock, validate_purge_mock, run_os_command_mock, download_mpack_mock, expand_mpack_mock, purge_stacks_and_mpacks_mock,
                                      add_replay_log_mock, get_ambari_properties_mock, get_ambari_version_mock,
-                                     create_symlink_mock, os_mkdir_mock, shutil_move_mock, os_path_exists_mock):
+                                     create_symlink_mock, read_ambari_user_mock, os_mkdir_mock, shutil_move_mock, os_path_exists_mock):
     options = self._create_empty_options_mock()
     options.mpack_path = "/path/to/mystack.tar.gz"
     options.purge = True
@@ -409,6 +410,7 @@ class TestMpacks(TestCase):
   @patch("os.path.exists")
   @patch("shutil.move")
   @patch("os.mkdir")
+  @patch("ambari_server.setupMpacks.read_ambari_user")
   @patch("ambari_server.setupMpacks.create_symlink")
   @patch("ambari_server.setupMpacks.get_ambari_version")
   @patch("ambari_server.setupMpacks.get_ambari_properties")
@@ -420,7 +422,7 @@ class TestMpacks(TestCase):
 
   def test_install_extension_mpack(self, set_file_permissions_mock, download_mpack_mock, expand_mpack_mock, add_replay_log_mock,
       purge_stacks_and_mpacks_mock, get_ambari_properties_mock, get_ambari_version_mock,
-      create_symlink_mock, os_mkdir_mock, shutil_move_mock, os_path_exists_mock):
+      create_symlink_mock, read_ambari_user_mock, os_mkdir_mock, shutil_move_mock, os_path_exists_mock):
     options = self._create_empty_options_mock()
     options.mpack_path = "/path/to/myextension.tar.gz"
     options.purge = False
@@ -486,6 +488,7 @@ class TestMpacks(TestCase):
   @patch("os.symlink")
   @patch("shutil.move")
   @patch("os.mkdir")
+  @patch("ambari_server.setupMpacks.read_ambari_user")
   @patch("ambari_server.setupMpacks.create_symlink")
   @patch("ambari_server.setupMpacks.get_ambari_version")
   @patch("ambari_server.setupMpacks.get_ambari_properties")
@@ -496,7 +499,7 @@ class TestMpacks(TestCase):
   @patch("ambari_server.setupMpacks.set_file_permissions")
   def test_install_addon_service_mpack(self, set_file_permissions_mock, download_mpack_mock, expand_mpack_mock, purge_stacks_and_mpacks_mock,
                                        add_replay_log_mock, get_ambari_properties_mock, get_ambari_version_mock,
-                                       create_symlink_mock, os_mkdir_mock, shutil_move_mock,os_symlink_mock,
+                                       create_symlink_mock, read_ambari_user_mock, os_mkdir_mock, shutil_move_mock,os_symlink_mock,
                                        os_path_isdir_mock, os_path_exists_mock ):
     options = self._create_empty_options_mock()
     options.mpack_path = "/path/to/myservice.tar.gz"
@@ -575,6 +578,7 @@ class TestMpacks(TestCase):
   @patch("os.path.exists")
   @patch("shutil.move")
   @patch("os.mkdir")
+  @patch("ambari_server.setupMpacks.read_ambari_user")
   @patch("ambari_server.setupMpacks.create_symlink")
   @patch("ambari_server.setupMpacks.get_ambari_version")
   @patch("ambari_server.setupMpacks.get_ambari_properties")
@@ -588,7 +592,7 @@ class TestMpacks(TestCase):
 
   def test_upgrade_stack_mpack(self, set_file_permissions_mock, run_os_command_mock, download_mpack_mock, expand_mpack_mock, purge_stacks_and_mpacks_mock,
                                _uninstall_mpack_mock, add_replay_log_mock, get_ambari_properties_mock,
-                               get_ambari_version_mock, create_symlink_mock, os_mkdir_mock, shutil_move_mock,
+                               get_ambari_version_mock, create_symlink_mock, read_ambari_user_mock, os_mkdir_mock, shutil_move_mock,
                                os_path_exists_mock, create_symlink_using_path_mock):
     options = self._create_empty_options_mock()
     options.mpack_path = "/path/to/mystack-1.0.0.1.tar.gz"

http://git-wip-us.apache.org/repos/asf/ambari/blob/158bd656/ambari-server/src/test/python/stacks/2.0.6/hooks/after-INSTALL/test_after_install.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/hooks/after-INSTALL/test_after_install.py b/ambari-server/src/test/python/stacks/2.0.6/hooks/after-INSTALL/test_after_install.py
index d792192..19c81a8 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/hooks/after-INSTALL/test_after_install.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/hooks/after-INSTALL/test_after_install.py
@@ -19,6 +19,7 @@ limitations under the License.
 '''
 
 import json
+
 from mock.mock import MagicMock, patch
 from stacks.utils.RMFTestCase import *
 from resource_management.core.logger import Logger
@@ -63,7 +64,7 @@ class TestHookAfterInstall(RMFTestCase):
                               create_parents = True)
     self.assertNoMoreResources()
 
-
+  @patch("os.path.isdir", new = MagicMock(return_value = True))
   @patch("shared_initialization.load_version", new = MagicMock(return_value="2.3.0.0-1234"))
   @patch("resource_management.libraries.functions.conf_select.create")
   @patch("resource_management.libraries.functions.conf_select.select")
@@ -115,25 +116,17 @@ class TestHookAfterInstall(RMFTestCase):
       for dir_def in dir_defs:
         conf_dir = dir_def['conf_dir']
         conf_backup_dir = conf_dir + ".backup"
+        current_dir = dir_def['current_dir']
         self.assertResourceCalled('Execute', ('cp', '-R', '-p', conf_dir, conf_backup_dir),
             not_if = 'test -e ' + conf_backup_dir,
             sudo = True,)
 
-      for dir_def in dir_defs:
-        conf_dir = dir_def['conf_dir']
-        current_dir = dir_def['current_dir']
-        self.assertResourceCalled('Directory', conf_dir,
-            action = ['delete'],)
-        self.assertResourceCalled('Link', conf_dir,
-            to = current_dir,)
-
-      #HACK for Atlas
-      if package in ["atlas", ]:
-        self.assertResourceCalled('Execute', 'ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E cp -R --no-clobber /etc/atlas/conf.backup/* /etc/atlas/conf',
-                                  only_if = 'test -e ' + "/etc/atlas/conf")
+        self.assertResourceCalled('Directory', conf_dir, action = ['delete'],)
+        self.assertResourceCalled('Link', conf_dir, to = current_dir,)
 
     self.assertNoMoreResources()
 
+  @patch("os.path.isdir", new = MagicMock(return_value = True))
   @patch("shared_initialization.load_version", new = MagicMock(return_value="2.3.0.0-1234"))
   @patch("resource_management.libraries.functions.conf_select.create")
   @patch("resource_management.libraries.functions.conf_select.select")
@@ -191,22 +184,13 @@ class TestHookAfterInstall(RMFTestCase):
       for dir_def in dir_defs:
         conf_dir = dir_def['conf_dir']
         conf_backup_dir = conf_dir + ".backup"
+        current_dir = dir_def['current_dir']
         self.assertResourceCalled('Execute', ('cp', '-R', '-p', conf_dir, conf_backup_dir),
             not_if = 'test -e ' + conf_backup_dir,
             sudo = True,)
 
-      for dir_def in dir_defs:
-        conf_dir = dir_def['conf_dir']
-        current_dir = dir_def['current_dir']
-        self.assertResourceCalled('Directory', conf_dir,
-            action = ['delete'],)
-        self.assertResourceCalled('Link', conf_dir,
-            to = current_dir,)
-
-      #HACK for Atlas
-      if package in ["atlas", ]:
-        self.assertResourceCalled('Execute', 'ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E cp -R --no-clobber /etc/atlas/conf.backup/* /etc/atlas/conf',
-                                  only_if = 'test -e ' + "/etc/atlas/conf")
+        self.assertResourceCalled('Directory', conf_dir, action = ['delete'],)
+        self.assertResourceCalled('Link', conf_dir, to = current_dir,)
 
     self.assertNoMoreResources()
 
@@ -252,6 +236,7 @@ class TestHookAfterInstall(RMFTestCase):
     self.assertResourceCalled('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'hive-server2', '2.3.0.0-1234'),
       sudo = True)
 
+  @patch("os.path.isdir", new = MagicMock(return_value = True))
   @patch("shared_initialization.load_version", new = MagicMock(return_value="2.3.0.0-1234"))
   @patch("resource_management.libraries.functions.conf_select.create")
   @patch("resource_management.libraries.functions.conf_select.select")
@@ -302,22 +287,13 @@ class TestHookAfterInstall(RMFTestCase):
       for dir_def in dir_defs:
         conf_dir = dir_def['conf_dir']
         conf_backup_dir = conf_dir + ".backup"
+        current_dir = dir_def['current_dir']
         self.assertResourceCalled('Execute', ('cp', '-R', '-p', conf_dir, conf_backup_dir),
             not_if = 'test -e ' + conf_backup_dir,
             sudo = True,)
 
-      for dir_def in dir_defs:
-        conf_dir = dir_def['conf_dir']
-        current_dir = dir_def['current_dir']
-        self.assertResourceCalled('Directory', conf_dir,
-            action = ['delete'],)
-        self.assertResourceCalled('Link', conf_dir,
-            to = current_dir,)
-
-      #HACK for Atlas
-      if package in ["atlas", ]:
-        self.assertResourceCalled('Execute', 'ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E cp -R --no-clobber /etc/atlas/conf.backup/* /etc/atlas/conf',
-                                  only_if = 'test -e ' + "/etc/atlas/conf")
+        self.assertResourceCalled('Directory', conf_dir, action = ['delete'],)
+        self.assertResourceCalled('Link', conf_dir, to = current_dir,)
 
     self.assertNoMoreResources()
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/158bd656/ambari-server/src/test/python/stacks/2.2/common/test_conf_select.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.2/common/test_conf_select.py b/ambari-server/src/test/python/stacks/2.2/common/test_conf_select.py
index 2eeec46..92dd634 100644
--- a/ambari-server/src/test/python/stacks/2.2/common/test_conf_select.py
+++ b/ambari-server/src/test/python/stacks/2.2/common/test_conf_select.py
@@ -100,12 +100,13 @@ class TestConfSelect(RMFTestCase):
 
 
   @patch("resource_management.core.shell.call")
+  @patch.object(os.path, "isdir")
   @patch.object(os.path, "exists")
   @patch.object(os.path, "islink")
   @patch("resource_management.libraries.functions.conf_select._valid", new = MagicMock(return_value = True))
   @patch("resource_management.libraries.functions.conf_select.create", new = MagicMock(return_value = ["/etc/hadoop/2.3.0.0-1234/0"]))
   @patch("resource_management.libraries.functions.conf_select.select", new = MagicMock())
-  def test_symlink_conversion_to_current(self, islink_mock, path_mock, shell_call_mock):
+  def test_symlink_conversion_to_current(self, islink_mock, path_mock, isdir_mock, shell_call_mock):
     """
     Tests that conf-select creates the correct symlink directories.
     :return:
@@ -134,6 +135,13 @@ class TestConfSelect(RMFTestCase):
 
       return False
 
+    def isdir_mock_call(path):
+      if path == "/etc/hadoop/conf":
+        return True
+
+      return False
+
+
     packages = conf_select.get_package_dirs()
 
     path_mock.side_effect = path_mock_call
@@ -175,7 +183,6 @@ class TestConfSelect(RMFTestCase):
     """
     packages = conf_select.get_package_dirs()
 
-    conf_select.convert_conf_directories_to_symlinks("hadoop", "2.3.0.0-1234",
-      packages["hadoop"], link_to = conf_select.DIRECTORY_TYPE_BACKUP)
+    conf_select.convert_conf_directories_to_symlinks("hadoop", "2.3.0.0-1234", packages["hadoop"])
 
     self.assertEqual(pprint.pformat(self.env.resource_list), "[]")
\ No newline at end of file
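
The new isdir_mock_call helper is presumably wired to isdir_mock.side_effect alongside
the existing path_mock.side_effect assignment; the general pattern for faking filesystem
checks this way (the paths match the test above):

    import os.path
    from mock.mock import patch

    @patch.object(os.path, "isdir")
    def run_check(isdir_mock):
        # every os.path.isdir() call is routed through the side_effect
        isdir_mock.side_effect = lambda path: path == "/etc/hadoop/conf"
        assert os.path.isdir("/etc/hadoop/conf")
        assert not os.path.isdir("/etc/hadoop/conf.backup")

    run_check()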

http://git-wip-us.apache.org/repos/asf/ambari/blob/158bd656/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/HIVE/package/scripts/hive_client.py
----------------------------------------------------------------------
diff --git a/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/HIVE/package/scripts/hive_client.py b/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/HIVE/package/scripts/hive_client.py
index 3d9bfd7..55cf61a 100755
--- a/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/HIVE/package/scripts/hive_client.py
+++ b/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/HIVE/package/scripts/hive_client.py
@@ -59,8 +59,6 @@ class HiveClientDefault(HiveClient):
     import params
     env.set_params(params)
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
-      conf_select.select(params.stack_name, "hive", params.version)
-      conf_select.select(params.stack_name, "hadoop", params.version)
       stack_select.select("hadoop-client", params.version)
 
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/158bd656/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/HIVE/package/scripts/hive_metastore.py
----------------------------------------------------------------------
diff --git a/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/HIVE/package/scripts/hive_metastore.py b/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/HIVE/package/scripts/hive_metastore.py
index a49bbd9..a69460e 100755
--- a/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/HIVE/package/scripts/hive_metastore.py
+++ b/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/HIVE/package/scripts/hive_metastore.py
@@ -105,7 +105,6 @@ class HiveMetastoreDefault(HiveMetastore):
     is_upgrade = params.upgrade_direction == Direction.UPGRADE
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
-      conf_select.select(params.stack_name, "hive", params.version)
       stack_select.select("hive-metastore", params.version)
 
     if is_upgrade and params.stack_version_formatted_major and \

http://git-wip-us.apache.org/repos/asf/ambari/blob/158bd656/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/HIVE/package/scripts/hive_server.py
----------------------------------------------------------------------
diff --git a/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/HIVE/package/scripts/hive_server.py b/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/HIVE/package/scripts/hive_server.py
index 31b083b..6aeaf80 100755
--- a/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/HIVE/package/scripts/hive_server.py
+++ b/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/HIVE/package/scripts/hive_server.py
@@ -119,7 +119,7 @@ class HiveServerDefault(HiveServer):
     env.set_params(params)
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
-      conf_select.select(params.stack_name, "hive", params.version)
+
       stack_select.select("hive-server2", params.version)
 
       # Copy mapreduce.tar.gz and tez.tar.gz to HDFS

http://git-wip-us.apache.org/repos/asf/ambari/blob/158bd656/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/HIVE/package/scripts/hive_server_interactive.py
----------------------------------------------------------------------
diff --git a/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/HIVE/package/scripts/hive_server_interactive.py b/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/HIVE/package/scripts/hive_server_interactive.py
index 2df001c..beb1220 100755
--- a/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/HIVE/package/scripts/hive_server_interactive.py
+++ b/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/HIVE/package/scripts/hive_server_interactive.py
@@ -87,7 +87,6 @@ class HiveServerInteractiveDefault(HiveServerInteractive):
 
       if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
         stack_select.select("hive-server2-hive2", params.version)
-        conf_select.select(params.stack_name, "hive2", params.version)
 
         # Copy hive.tar.gz and tez.tar.gz used by Hive Interactive to HDFS
         resource_created = copy_to_hdfs(

http://git-wip-us.apache.org/repos/asf/ambari/blob/158bd656/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/HIVE/package/scripts/webhcat_server.py
----------------------------------------------------------------------
diff --git a/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/HIVE/package/scripts/webhcat_server.py b/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/HIVE/package/scripts/webhcat_server.py
index 34687c4..ee9087d 100755
--- a/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/HIVE/package/scripts/webhcat_server.py
+++ b/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/HIVE/package/scripts/webhcat_server.py
@@ -79,8 +79,6 @@ class WebHCatServerDefault(WebHCatServer):
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version): 
       # webhcat has no conf, but uses hadoop home, so verify that regular hadoop conf is set
-      conf_select.select(params.stack_name, "hive-hcatalog", params.version)
-      conf_select.select(params.stack_name, "hadoop", params.version)
       stack_select.select("hive-webhcat", params.version)
 
   def security_status(self, env):

http://git-wip-us.apache.org/repos/asf/ambari/blob/158bd656/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/YARN/package/scripts/application_timeline_server.py
----------------------------------------------------------------------
diff --git a/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/YARN/package/scripts/application_timeline_server.py b/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/YARN/package/scripts/application_timeline_server.py
index 4ec6aa7..e3a81cf 100755
--- a/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/YARN/package/scripts/application_timeline_server.py
+++ b/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/YARN/package/scripts/application_timeline_server.py
@@ -72,7 +72,6 @@ class ApplicationTimelineServerDefault(ApplicationTimelineServer):
     env.set_params(params)
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
-      conf_select.select(params.stack_name, "hadoop", params.version)
       stack_select.select("hadoop-yarn-timelineserver", params.version)
 
   def status(self, env):

http://git-wip-us.apache.org/repos/asf/ambari/blob/158bd656/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/YARN/package/scripts/historyserver.py
----------------------------------------------------------------------
diff --git a/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/YARN/package/scripts/historyserver.py b/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/YARN/package/scripts/historyserver.py
index 34c683a..f933e91 100755
--- a/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/YARN/package/scripts/historyserver.py
+++ b/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/YARN/package/scripts/historyserver.py
@@ -79,7 +79,6 @@ class HistoryServerDefault(HistoryServer):
     env.set_params(params)
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
-      conf_select.select(params.stack_name, "hadoop", params.version)
       stack_select.select("hadoop-mapreduce-historyserver", params.version)
       # MC Hammer said, "Can't touch this"
       copy_to_hdfs("mapreduce", params.user_group, params.hdfs_user, host_sys_prepped=params.host_sys_prepped)

http://git-wip-us.apache.org/repos/asf/ambari/blob/158bd656/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/YARN/package/scripts/mapreduce2_client.py
----------------------------------------------------------------------
diff --git a/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/YARN/package/scripts/mapreduce2_client.py b/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/YARN/package/scripts/mapreduce2_client.py
index 424157b..8de9d56 100755
--- a/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/YARN/package/scripts/mapreduce2_client.py
+++ b/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/YARN/package/scripts/mapreduce2_client.py
@@ -71,7 +71,6 @@ class MapReduce2Client(Script):
       # Because this script was called from ru_execute_tasks.py which already enters an Environment with its own basedir,
       # must change it now so this function can find the Jinja Templates for the service.
       env.config.basedir = base_dir
-      conf_select.select(params.stack_name, conf_select_name, params.version)
       self.configure(env, config_dir=config_dir)
 
 
@@ -90,7 +89,6 @@ class MapReduce2ClientDefault(MapReduce2Client):
     env.set_params(params)
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
-      conf_select.select(params.stack_name, "hadoop", params.version)
       stack_select.select("hadoop-client", params.version)
 
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/158bd656/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/YARN/package/scripts/nodemanager.py
----------------------------------------------------------------------
diff --git a/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/YARN/package/scripts/nodemanager.py b/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/YARN/package/scripts/nodemanager.py
index b235cad..4f3eecb 100755
--- a/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/YARN/package/scripts/nodemanager.py
+++ b/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/YARN/package/scripts/nodemanager.py
@@ -74,7 +74,6 @@ class NodemanagerDefault(Nodemanager):
     env.set_params(params)
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
-      conf_select.select(params.stack_name, "hadoop", params.version)
       stack_select.select("hadoop-yarn-nodemanager", params.version)
 
   def post_upgrade_restart(self, env, upgrade_type=None):

http://git-wip-us.apache.org/repos/asf/ambari/blob/158bd656/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/YARN/package/scripts/resourcemanager.py
----------------------------------------------------------------------
diff --git a/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/YARN/package/scripts/resourcemanager.py b/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/YARN/package/scripts/resourcemanager.py
index 71c7bc1..12c279a 100755
--- a/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/YARN/package/scripts/resourcemanager.py
+++ b/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/YARN/package/scripts/resourcemanager.py
@@ -114,7 +114,6 @@ class ResourcemanagerDefault(Resourcemanager):
     env.set_params(params)
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
-      conf_select.select(params.stack_name, "hadoop", params.version)
       stack_select.select("hadoop-yarn-resourcemanager", params.version)
 
   def start(self, env, upgrade_type=None):

http://git-wip-us.apache.org/repos/asf/ambari/blob/158bd656/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/YARN/package/scripts/yarn_client.py
----------------------------------------------------------------------
diff --git a/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/YARN/package/scripts/yarn_client.py b/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/YARN/package/scripts/yarn_client.py
index 4d65a40..5cd2e69 100755
--- a/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/YARN/package/scripts/yarn_client.py
+++ b/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/YARN/package/scripts/yarn_client.py
@@ -59,7 +59,6 @@ class YarnClientDefault(YarnClient):
     env.set_params(params)
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
-      conf_select.select(params.stack_name, "hadoop", params.version)
       stack_select.select("hadoop-client", params.version)