Posted to commits@ambari.apache.org by jo...@apache.org on 2018/08/08 15:11:50 UTC

[ambari] branch branch-feature-AMBARI-14714 updated: [AMBARI-24410] - Remove conf-select Tool From Ambari Framework (#1981)

This is an automated email from the ASF dual-hosted git repository.

jonathanhurley pushed a commit to branch branch-feature-AMBARI-14714
in repository https://gitbox.apache.org/repos/asf/ambari.git


The following commit(s) were added to refs/heads/branch-feature-AMBARI-14714 by this push:
     new f54cef1  [AMBARI-24410] - Remove conf-select Tool From Ambari Framework (#1981)
f54cef1 is described below

commit f54cef1e337986264bc6c38a6ff042b02f148b9f
Author: Jonathan Hurley <jo...@apache.org>
AuthorDate: Wed Aug 8 11:11:48 2018 -0400

    [AMBARI-24410] - Remove conf-select Tool From Ambari Framework (#1981)
---
 .../libraries/functions/conf_select.py             | 387 +--------------------
 .../libraries/functions/stack_tools.py             |   1 -
 .../resource_management/libraries/script/script.py |   2 -
 .../stack-hooks/after-INSTALL/scripts/hook.py      |   6 +-
 .../stack-hooks/after-INSTALL/scripts/params.py    |   2 -
 .../after-INSTALL/scripts/shared_initialization.py |  18 -
 .../hooks/after-INSTALL/test_after_install.py      | 195 -----------
 .../python/stacks/2.2/common/test_conf_select.py   | 200 -----------
 8 files changed, 6 insertions(+), 805 deletions(-)
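
After this change, conf_select.py keeps only get_hadoop_conf_dir(), wrapped in the @deprecated decorator imported in the diff below. A minimal sketch of that decorator pattern, assuming a warnings-based implementation (the diff only shows the decorator's import path and its comment argument, not its body, so treat this as illustrative):

    # Illustrative sketch of a comment-bearing deprecation decorator like the
    # one imported from resource_management.libraries.functions.decorator.
    # The warnings-based behavior is an assumption; Ambari's real decorator
    # may log instead.
    import warnings
    from functools import wraps

    def deprecated(comment=""):
      def decorator(function):
        @wraps(function)
        def wrapper(*args, **kwargs):
          warnings.warn("{0} is deprecated. {1}".format(function.__name__, comment),
                        DeprecationWarning, stacklevel=2)
          return function(*args, **kwargs)
        return wrapper
      return decorator

    @deprecated(comment="The conf-select tools are no longer used.")
    def get_hadoop_conf_dir():
      return "/etc/hadoop/conf"  # placeholder body for the sketch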

diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/conf_select.py b/ambari-common/src/main/python/resource_management/libraries/functions/conf_select.py
index 7b3f66d..f285c32 100644
--- a/ambari-common/src/main/python/resource_management/libraries/functions/conf_select.py
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/conf_select.py
@@ -18,144 +18,24 @@ limitations under the License.
 
 """
 
-__all__ = ["select", "create", "get_hadoop_conf_dir", "get_hadoop_dir", "get_package_dirs"]
+__all__ = ["get_hadoop_conf_dir"]
 
 # Python Imports
 import os
-from ambari_commons import subprocess32
-import ambari_simplejson as json
 
 # Local Imports
-from resource_management.core import shell
-from resource_management.libraries.functions.format import format
 from resource_management.libraries.script.script import Script
 from resource_management.core.logger import Logger
-from resource_management.core.resources.system import Directory
-from resource_management.core.resources.system import Execute
-from resource_management.core.resources.system import Link
 from resource_management.libraries.functions import component_version
 from resource_management.libraries.functions.default import default
-from resource_management.libraries.functions import stack_tools
-from resource_management.core.exceptions import Fail
 from resource_management.core import sudo
-from resource_management.core.shell import as_sudo
 from resource_management.libraries.functions.stack_features import check_stack_feature
 from resource_management.libraries.functions import StackFeature
+from resource_management.libraries.functions.decorator import deprecated
 
-def _get_cmd(command, package, version):
-  conf_selector_path = stack_tools.get_stack_tool_path(stack_tools.CONF_SELECTOR_NAME)
-  return ('ambari-python-wrap', conf_selector_path, command, '--package', package, '--stack-version', version, '--conf-version', '0')
-
-def _valid(stack_name, package, ver):
-  return (ver and check_stack_feature(StackFeature.CONFIG_VERSIONING, ver))
-
-def get_package_dirs():
-  """
-  Get the conf-select package directory mappings from the stack packages configuration.
-  :return: a dictionary mapping each package name to its list of directory definitions
-  """
-  stack_name = default("/stackSettings/stack_name", None)
-  if stack_name is None:
-    raise Fail("The stack name is not present in the command. Packages for conf-select tool cannot be loaded.")
-
-  stack_packages_config = default("/configurations/cluster-env/stack_packages", None)
-  if stack_packages_config is None:
-    raise Fail("The stack packages are not defined on the command. Unable to load packages for the conf-select tool")
-
-  data = json.loads(stack_packages_config)
-
-  if stack_name not in data:
-    raise Fail(
-      "Cannot find conf-select packages for the {0} stack".format(stack_name))
-
-  conf_select_key = "conf-select"
-  data = data[stack_name]
-  if conf_select_key not in data:
-    raise Fail(
-      "There are no conf-select packages defined for this command for the {0} stack".format(stack_name))
-
-  package_dirs = data[conf_select_key]
-
-  stack_root = Script.get_stack_root()
-  for package_name, directories in package_dirs.iteritems():
-    for dir in directories:
-      current_dir = dir['current_dir']
-      current_dir =  current_dir.format(stack_root)
-      dir['current_dir'] = current_dir
-
-  return package_dirs
-
-def create(stack_name, package, version, dry_run = False):
-  """
-  Creates a config version for the specified package
-  :param stack_name: the name of the stack
-  :param package: the name of the package, as-used by <conf-selector-tool>
-  :param version: the version number to create
-  :param dry_run: False to create the versioned config directory, True to only return what would be created
-  :return: the list of directories created
-  """
-  if not _valid(stack_name, package, version):
-    Logger.info("Unable to create versioned configuration directories since the parameters supplied do not support it")
-    return []
-
-  # clarify the logging of what we're doing ...
-  if dry_run:
-    Logger.info(
-      "Checking to see which directories will be created for {0} on version {1}".format(package, version))
-  else:
-    Logger.info("Creating /etc/{0}/{1}/0 if it does not exist".format(package, version))
-
-  command = "dry-run-create" if dry_run else "create-conf-dir"
-
-  code, stdout, stderr = shell.call(_get_cmd(command, package, version), logoutput=False, quiet=False, sudo=True, stderr = subprocess32.PIPE)
-
-  # <conf-selector-tool> can set more than one directory
-  # per package, so return that list, especially for dry_run
-  # > <conf-selector-tool> dry-run-create --package hive-hcatalog --stack-version 2.4.0.0-169 --conf-version 0
-  # /etc/hive-webhcat/2.4.0.0-169/0
-  # /etc/hive-hcatalog/2.4.0.0-169/0
-  created_directories = []
-  if 0 == code and stdout is not None: # just be sure we have a stdout
-    for line in stdout.splitlines():
-      created_directories.append(line.rstrip('\n'))
-
-  # if directories were created, then do some post-processing
-  if not code and stdout and not dry_run:
-    # take care of permissions if directories were created
-    for directory in created_directories:
-      Directory(directory, mode=0755, cd_access='a', create_parents=True)
-
-    # seed the new directories with configurations from the old (current) directories
-    _seed_new_configuration_directories(package, created_directories)
-
-  return created_directories
-
-
-def select(stack_name, package, version, ignore_errors=False):
-  """
-  Selects a config version for the specified package.
-
-  :param stack_name: the name of the stack
-  :param package: the name of the package, as-used by <conf-selector-tool>
-  :param version: the version number to select
-  :param ignore_errors: optional argument to ignore any error and simply log a warning
-  """
-  try:
-    # do nothing if the stack does not support versioned configurations
-    if not _valid(stack_name, package, version):
-      return
-
-    create(stack_name, package, version)
-    shell.checked_call(_get_cmd("set-conf-dir", package, version), logoutput=False, quiet=False, sudo=True)
-  except Exception, exception:
-    if ignore_errors is True:
-      Logger.warning("Could not select the directory for package {0}. Error: {1}".format(package,
-        str(exception)))
-    else:
-      raise
-
-
-
+@deprecated(comment = "The conf-select tools are no longer used. In order to get the hadoop conf "
+                      "directory, the mpack_manager_helper module should be used to get the conf "
+                      "directory of HADOOP_CLIENTS for the component's service group.")
 def get_hadoop_conf_dir():
   """
   Return the hadoop shared conf directory which should be used for the command's component. The
@@ -180,260 +60,3 @@ def get_hadoop_conf_dir():
 
   return hadoop_conf_dir
 
-
-def convert_conf_directories_to_symlinks(package, version, dirs):
-  """
-  Reverses the symlinks created by the package installer and invokes the conf-select tool to
-  create versioned configuration directories for the given package. If the package does not exist,
-  then no work is performed.
-
-  - Creates /etc/<component>/<version>/0 via <conf-selector-tool>
-  - Creates a /etc/<component>/conf.backup directory, if needed
-  - Copies all configs from /etc/<component>/conf to conf.backup, if needed
-  - Removes /etc/<component>/conf, if needed
-  - <stack-root>/current/<component>-client/conf -> /etc/<component>/<version>/0 via <conf-selector-tool>
-  - Links /etc/<component>/conf -> <stack-root>/current/[component]-client/conf
-
-  :param package: the package to create symlinks for (zookeeper, falcon, etc)
-  :param version: the version number to use with <conf-selector-tool> (2.3.0.0-1234)
-  :param dirs: the directories associated with the package (from get_package_dirs())
-  """
-  # if the conf_dir doesn't exist, then that indicates that the package's service is not installed
-  # on this host and nothing should be done with conf symlinks
-  stack_name = Script.get_stack_name()
-  for directory_struct in dirs:
-    if not os.path.exists(directory_struct['conf_dir']):
-      Logger.info("Skipping the conf-select tool on {0} since {1} does not exist.".format(
-        package, directory_struct['conf_dir']))
-
-      return
-
-  # determine which directories would be created, if any are needed
-  dry_run_directory = create(stack_name, package, version, dry_run = True)
-
-  # if the dry run reported an error, then we must assume that the package does not exist in
-  # the conf-select tool
-  if len(dry_run_directory) == 0:
-    Logger.info("The conf-select tool reported an error for the package {0}. The configuration linking will be skipped.".format(package))
-    return
-
-
-  need_dirs = []
-  for d in dry_run_directory:
-    if not os.path.exists(d):
-      need_dirs.append(d)
-
-  # log that we'll actually be creating some directories soon
-  if len(need_dirs) > 0:
-    Logger.info("Package {0} will have the following new configuration directories created: {1}".format(
-      package, ", ".join(dry_run_directory)))
-
-  # Create the versioned /etc/[component]/[version]/0 folder (using create-conf-dir) and then
-  # set it for the installed component:
-  # - Creates /etc/<component>/<version>/0
-  # - Links <stack-root>/<version>/<component>/conf -> /etc/<component>/<version>/0
-  select(stack_name, package, version, ignore_errors = True)
-
-  # check every existing link to see if it's a link and if it's pointed to the right spot
-  for directory_struct in dirs:
-    try:
-      # check if conf is a link already
-      old_conf = directory_struct['conf_dir']
-      current_dir = directory_struct['current_dir']
-      if os.path.islink(old_conf):
-        # it's already a link; make sure it's a link to where we want it
-        if os.readlink(old_conf) != current_dir:
-          # the link isn't to the right spot; re-link it
-          Logger.info("Re-linking symlink {0} to {1}".format(old_conf, current_dir))
-          Link(old_conf, action = "delete")
-          Link(old_conf, to = current_dir)
-        else:
-          Logger.info("{0} is already linked to {1}".format(old_conf, current_dir))
-      elif os.path.isdir(old_conf):
-        # the /etc/<component>/conf directory is not a link, so turn it into one
-        Logger.info("{0} is a directory - it must be converted into a symlink".format(old_conf))
-
-        backup_dir = _get_backup_conf_directory(old_conf)
-        Logger.info("Backing up {0} to {1} if destination doesn't exist already.".format(old_conf, backup_dir))
-        Execute(("cp", "-R", "-p", old_conf, backup_dir),
-          not_if = format("test -e {backup_dir}"), sudo = True)
-
-        # delete the old /etc/<component>/conf directory now that it's been backed up
-        Directory(old_conf, action = "delete")
-
-        # link /etc/[component]/conf -> <stack-root>/current/[component]-client/conf
-        Link(old_conf, to = current_dir)
-      else:
-        # missing entirely
-        # /etc/<component>/conf -> <stack-root>/current/<component>/conf
-        if package in ["atlas", ]:
-          # HACK for Atlas
-          '''
-          In the case of Atlas, the Hive RPM installs /usr/$stack/$version/atlas with some partial packages that
-          contain Hive hooks, while the Atlas RPM is responsible for installing the full content.
-
-          If the user does not have Atlas currently installed on their stack, then /usr/$stack/current/atlas-client
-          will be a broken symlink, and we should not create the
-          symlink /etc/atlas/conf -> /usr/$stack/current/atlas-client/conf .
-          If we mistakenly create this symlink, then when the user performs an EU/RU and later adds the Atlas service,
-          the Atlas RPM will not be able to copy its artifacts into the /etc/atlas/conf directory, which in turn
-          prevents Ambari from copying those unmanaged contents into /etc/atlas/$version/0
-          '''
-          component_list = default("/localComponents", [])
-          if "ATLAS_SERVER" in component_list or "ATLAS_CLIENT" in component_list:
-            Logger.info("Atlas is installed on this host.")
-            parent_dir = os.path.dirname(current_dir)
-            if os.path.exists(parent_dir):
-              Link(old_conf, to = current_dir)
-            else:
-              Logger.info(
-                "Will not create symlink from {0} to {1} because the destination's parent dir does not exist.".format(
-                  old_conf, current_dir))
-          else:
-            Logger.info(
-            "Will not create symlink from {0} to {1} because Atlas is not installed on this host.".format(
-              old_conf, current_dir))
-        else:
-          # Normal path for other packages
-          Link(old_conf, to = current_dir)
-
-    except Exception, e:
-      Logger.warning("Could not change symlink for package {0} to point to current directory. Error: {1}".format(package, e))
-
-
-def get_restricted_packages():
-  """
-  Gets the list of conf-select 'package' names that need to be invoked on the command.
-  When the server passes down the list of packages to install, check the service names
-  and use the information in stack_packages json to determine the list of packages that should
-  be executed.  That is valid only for PATCH or MAINT upgrades.  STANDARD upgrades should
-  conf-select everything they can find.
-  """
-  package_names = []
-
-  # shortcut the common case if we are not patching
-  cluster_version_summary = default("/roleParameters/cluster_version_summary/services", None)
-
-  if cluster_version_summary is None:
-    Logger.info("Cluster Summary is not available, there are no restrictions for conf-select")
-    return package_names
-
-  service_names = []
-
-  # pick out the services that are targeted
-  for servicename, servicedetail in cluster_version_summary.iteritems():
-    if servicedetail['upgrade']:
-      service_names.append(servicename)
-
-  if 0 == len(service_names):
-    Logger.info("No services found, there are no restrictions for conf-select")
-    return package_names
-
-  stack_name = default("/stackSettings/stack_name", None)
-  if stack_name is None:
-    Logger.info("The stack name is not present in the command. Restricted names skipped.")
-    return package_names
-
-  stack_packages_config = default("/configurations/cluster-env/stack_packages", None)
-  if stack_packages_config is None:
-    Logger.info("The stack packages are not defined on the command. Restricted names skipped.")
-    return package_names
-
-  data = json.loads(stack_packages_config)
-
-  if stack_name not in data:
-    Logger.info("Cannot find conf-select packages for the {0} stack".format(stack_name))
-    return package_names
-
-  conf_select_key = "conf-select-patching"
-  if conf_select_key not in data[stack_name]:
-    Logger.info("There are no conf-select-patching elements defined for this command for the {0} stack".format(stack_name))
-    return package_names
-
-  service_dict = data[stack_name][conf_select_key]
-
-  for servicename in service_names:
-    if servicename in service_dict and 'packages' in service_dict[servicename]:
-      package_names.extend(service_dict[servicename]['packages'])
-
-  return package_names
-
-
-def _seed_new_configuration_directories(package, created_directories):
-  """
-  Copies any files from the "current" configuration directory to the directories which were
-  newly created with <conf-selector-tool>. This function helps ensure that files which are not tracked
-  by Ambari will be available after performing a stack upgrade. Although old configurations
-  will be copied as well, they will be overwritten when the components are writing out their
-  configs after upgrade during their restart.
-
-  This function will catch all errors, logging them, but not raising an exception. This is to
-  prevent problems here from stopping an otherwise healthy upgrade.
-
-  :param package: the <conf-selector-tool> package name
-  :param created_directories: a list of directories that <conf-selector-tool> said it created
-  :return: None
-  """
-  package_dirs = get_package_dirs()
-  if package not in package_dirs:
-    Logger.warning("Unable to seed newly created configuration directories for {0} because it is an unknown component".format(package))
-    return
-
-  # seed the directories with any existing configurations
-  # this allows files which are not tracked by Ambari to be available after an upgrade
-  Logger.info("Seeding versioned configuration directories for {0}".format(package))
-  expected_directories = package_dirs[package]
-
-  try:
-    # if the expected directories don't match those created, we can't seed them
-    if len(created_directories) != len(expected_directories):
-      Logger.warning("The known configuration directories for {0} do not match those created by conf-select: {1}".format(
-        package, str(created_directories)))
-
-      return
-
-    # short circuit for a simple 1:1 mapping
-    if len(expected_directories) == 1:
-      # <stack-root>/current/component/conf
-      # the current directory is the source of the seeded configurations;
-      source_seed_directory = expected_directories[0]["current_dir"]
-      target_seed_directory = created_directories[0]
-      _copy_configurations(source_seed_directory, target_seed_directory)
-    else:
-      for created_directory in created_directories:
-        for expected_directory_structure in expected_directories:
-          prefix = expected_directory_structure.get("prefix", None)
-          if prefix is not None and created_directory.startswith(prefix):
-            source_seed_directory = expected_directory_structure["current_dir"]
-            target_seed_directory = created_directory
-            _copy_configurations(source_seed_directory, target_seed_directory)
-
-  except Exception, e:
-    Logger.warning("Unable to seed new configuration directories for {0}. {1}".format(package, str(e)))
-
-
-def _copy_configurations(source_directory, target_directory):
-  """
-  Copies from the source directory to the target directory. If the source directory is a symlink
-  then it will be followed (dereferenced), but any other symlinks found during the copy will not be. This
-  ensures that if the configuration directory itself is a symlink, its contents will be
-  copied, preserving any children which are themselves symlinks.
-
-  :param source_directory:  the source directory to copy from
-  :param target_directory:  the target directory to copy to
-  :return: None
-  """
-  # append a trailing wildcard so the cp command copies the directory contents correctly WRT recursion and symlinks
-  source_directory = os.path.join(source_directory, "*")
-  Execute(as_sudo(["cp", "-R", "-p", "-v", source_directory, target_directory], auto_escape = False),
-    logoutput = True)
-
-def _get_backup_conf_directory(old_conf):
-  """
-  Calculates the conf.backup absolute directory given the /etc/<component>/conf location.
-  :param old_conf:  the old conf directory (ie /etc/<component>/conf)
-  :return:  the conf.backup absolute directory
-  """
-  old_parent = os.path.abspath(os.path.join(old_conf, os.pardir))
-  backup_dir = os.path.join(old_parent, "conf.backup")
-  return backup_dir
\ No newline at end of file
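
To make the removed behavior concrete: the core of the deleted convert_conf_directories_to_symlinks() reduces to a backup-then-relink sequence per directory pair. A standalone sketch using plain stdlib calls in place of Ambari's Execute/Directory/Link resources (which also handled sudo and escaping); the example paths are hypothetical:

    import os
    import shutil

    def convert_to_symlink(conf_dir, current_dir):
      # e.g. conf_dir = "/etc/hadoop/conf",
      #      current_dir = "/usr/hdp/current/hadoop-client/conf"
      if os.path.islink(conf_dir):
        if os.readlink(conf_dir) != current_dir:
          os.remove(conf_dir)                 # stale link: delete and re-link
          os.symlink(current_dir, conf_dir)
      elif os.path.isdir(conf_dir):
        backup_dir = os.path.join(os.path.dirname(conf_dir), "conf.backup")
        if not os.path.exists(backup_dir):    # mirrors the not_if guard above
          shutil.copytree(conf_dir, backup_dir)
        shutil.rmtree(conf_dir)               # drop the real directory ...
        os.symlink(current_dir, conf_dir)     # ... and point it at current
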
diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/stack_tools.py b/ambari-common/src/main/python/resource_management/libraries/functions/stack_tools.py
index c16dd22..4ad83e0 100644
--- a/ambari-common/src/main/python/resource_management/libraries/functions/stack_tools.py
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/stack_tools.py
@@ -24,7 +24,6 @@ __all__ = ["get_stack_tool", "get_stack_tool_name", "get_stack_tool_path",
 # simplejson is much faster compared to the Python 2.6 json module and has the same set of functions.
 import ambari_simplejson as json
 
-from resource_management.core.exceptions import Fail
 from resource_management.core.logger import Logger
 from resource_management.core.utils import pad
 from resource_management.libraries.functions import stack_settings
diff --git a/ambari-common/src/main/python/resource_management/libraries/script/script.py b/ambari-common/src/main/python/resource_management/libraries/script/script.py
index 2211baf..c208731 100644
--- a/ambari-common/src/main/python/resource_management/libraries/script/script.py
+++ b/ambari-common/src/main/python/resource_management/libraries/script/script.py
@@ -93,8 +93,6 @@ def get_path_from_configuration(name, configuration):
 
   return configuration
 
-def get_config_lock_file():
-  return os.path.join(Script.get_tmp_dir(), "link_configs_lock_file")
 
 class Script(object):
   instance = None
diff --git a/ambari-server/src/main/resources/stack-hooks/after-INSTALL/scripts/hook.py b/ambari-server/src/main/resources/stack-hooks/after-INSTALL/scripts/hook.py
index a4733c0..97b0dc5 100644
--- a/ambari-server/src/main/resources/stack-hooks/after-INSTALL/scripts/hook.py
+++ b/ambari-server/src/main/resources/stack-hooks/after-INSTALL/scripts/hook.py
@@ -18,8 +18,6 @@ limitations under the License.
 """
 
 from resource_management.libraries.script.hook import Hook
-from shared_initialization import link_configs
-from shared_initialization import setup_config
 from shared_initialization import setup_stack_symlinks
 
 class AfterInstallHook(Hook):
@@ -29,9 +27,7 @@ class AfterInstallHook(Hook):
 
     env.set_params(params)
     setup_stack_symlinks(self.stroutfile)
-    # setup_config()
-    #
-    # link_configs(self.stroutfile)
+
 
 if __name__ == "__main__":
   AfterInstallHook().execute()
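
With the conf-select calls gone, the after-INSTALL hook reduces to the stack symlink setup. Sketched in full below; the imports and statements come from the diff's context lines, but the hook(self, env) method name follows Ambari's Hook convention and is an assumption here:

    from resource_management.libraries.script.hook import Hook
    from shared_initialization import setup_stack_symlinks

    class AfterInstallHook(Hook):
      def hook(self, env):
        import params                         # hook-local params module
        env.set_params(params)
        setup_stack_symlinks(self.stroutfile)

    if __name__ == "__main__":
      AfterInstallHook().execute()
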
diff --git a/ambari-server/src/main/resources/stack-hooks/after-INSTALL/scripts/params.py b/ambari-server/src/main/resources/stack-hooks/after-INSTALL/scripts/params.py
index f8eb445..42ec7d5 100644
--- a/ambari-server/src/main/resources/stack-hooks/after-INSTALL/scripts/params.py
+++ b/ambari-server/src/main/resources/stack-hooks/after-INSTALL/scripts/params.py
@@ -22,7 +22,6 @@ import os
 from ambari_commons.constants import AMBARI_SUDO_BINARY
 from ambari_commons.constants import LOGFEEDER_CONF_DIR
 from resource_management.libraries.script import Script
-from resource_management.libraries.script.script import get_config_lock_file
 from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.format_jvm_option import format_jvm_option_value
@@ -113,7 +112,6 @@ if has_namenode or dfs_type == 'HCFS':
       mount_table_xml_inclusion_file_full_path = os.path.join(hadoop_conf_dir, xml_inclusion_file_name)
       mount_table_content = mount_table['content']
 
-link_configs_lock_file = get_config_lock_file()
 stack_select_lock_file = os.path.join(tmp_dir, "stack_select_lock_file")
 
 upgrade_suspended = execution_command.is_upgrade_suspended()
diff --git a/ambari-server/src/main/resources/stack-hooks/after-INSTALL/scripts/shared_initialization.py b/ambari-server/src/main/resources/stack-hooks/after-INSTALL/scripts/shared_initialization.py
index 073cdb6..99ba28d 100644
--- a/ambari-server/src/main/resources/stack-hooks/after-INSTALL/scripts/shared_initialization.py
+++ b/ambari-server/src/main/resources/stack-hooks/after-INSTALL/scripts/shared_initialization.py
@@ -123,21 +123,3 @@ def load_version(struct_out_file):
     return json_info['version']
   except (IOError, KeyError, TypeError):
     return None
-
-
-def link_configs(struct_out_file):
-  """
-  Use the conf_select module to link configuration directories correctly.
-  """
-  import params
-
-  json_version = load_version(struct_out_file)
-
-  if not json_version:
-    Logger.info("Could not load 'version' from {0}".format(struct_out_file))
-    return
-
-  # On parallel command execution this should be executed by a single process at a time.
-  with FcntlBasedProcessLock(params.link_configs_lock_file, enabled = params.is_parallel_execution_enabled, skip_fcntl_failures = True):
-    for package_name, directories in conf_select.get_package_dirs().iteritems():
-      conf_select.convert_conf_directories_to_symlinks(package_name, json_version, directories)
\ No newline at end of file
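
The deleted link_configs() serialized its work across processes with FcntlBasedProcessLock. That class's internals are not shown in this diff; a minimal stand-in for the same idea using the standard fcntl module, with a hypothetical lock-file path:

    import fcntl

    class SimpleProcessLock(object):
      # Advisory, blocking, exclusive lock held for the lifetime of the
      # `with` block. Analogous in spirit to FcntlBasedProcessLock, whose
      # real implementation may differ (e.g. skip_fcntl_failures support).
      def __init__(self, lock_file_path):
        self.lock_file_path = lock_file_path
        self.handle = None

      def __enter__(self):
        self.handle = open(self.lock_file_path, "a")
        fcntl.flock(self.handle, fcntl.LOCK_EX)   # block until exclusive
        return self

      def __exit__(self, exc_type, exc_value, traceback):
        fcntl.flock(self.handle, fcntl.LOCK_UN)
        self.handle.close()

    # usage (path is hypothetical):
    # with SimpleProcessLock("/var/lib/ambari-agent/tmp/link_configs_lock_file"):
    #   ... perform the conf symlink conversion ...
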
diff --git a/ambari-server/src/test/python/stacks/2.0.6/hooks/after-INSTALL/test_after_install.py b/ambari-server/src/test/python/stacks/2.0.6/hooks/after-INSTALL/test_after_install.py
index c03a10f..a5298f7 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/hooks/after-INSTALL/test_after_install.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/hooks/after-INSTALL/test_after_install.py
@@ -24,7 +24,6 @@ import json
 from mock.mock import MagicMock, patch
 from stacks.utils.RMFTestCase import *
 from resource_management.core.logger import Logger
-from resource_management.libraries.functions import conf_select
 from resource_management.libraries.script import Script
 
 @patch("os.path.exists", new = MagicMock(return_value=True))
@@ -66,138 +65,6 @@ class TestHookAfterInstall(RMFTestCase):
                               create_parents = True)
     self.assertNoMoreResources()
 
-  @patch("os.path.isdir", new = MagicMock(return_value = True))
-  @patch("shared_initialization.load_version", new = MagicMock(return_value="2.3.0.0-1234"))
-  @patch("resource_management.libraries.functions.conf_select.create")
-  @patch("resource_management.libraries.functions.conf_select.select")
-  @patch("os.symlink")
-  @patch("shutil.rmtree")
-  def test_hook_default_conf_select(self, rmtree_mock, symlink_mock, conf_select_select_mock, conf_select_create_mock):
-
-    def mocked_conf_select(arg1, arg2, arg3, dry_run = False):
-      return "/etc/{0}/{1}/0".format(arg2, arg3)
-
-    conf_select_create_mock.side_effect = mocked_conf_select
-
-    config_file = self.get_src_folder() + "/test/python/stacks/2.0.6/configs/default.json"
-    with open(config_file, "r") as f:
-      json_content = json.load(f)
-
-    version = '2.3.0.0-1234'
-    json_content['commandParams']['version'] = version
-    json_content['clusterLevelParams']['stack_version'] = "2.3"
-
-    self.executeScript("after-INSTALL/scripts/hook.py",
-                       classname="AfterInstallHook",
-                       command="hook",
-                       stack_version = self.STACK_VERSION,
-                       target=RMFTestCase.TARGET_STACK_HOOKS,
-                       config_dict = json_content,
-                       config_overrides = self.CONFIG_OVERRIDES)
-
-
-    self.assertResourceCalled('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'hive-server2', '2.3.0.0-1234'),
-      sudo = True)
-
-    self.assertResourceCalled('XmlConfig', 'core-site.xml',
-      owner = 'hdfs',
-      group = 'hadoop',
-      conf_dir = "/usr/hdp/2.3.0.0-1234/hadoop/conf",
-      configurations = self.getConfig()['configurations']['core-site'],
-      configuration_attributes = self.getConfig()['configurationAttributes']['core-site'],
-      only_if="ls /usr/hdp/2.3.0.0-1234/hadoop/conf",
-      xml_include_file=None)
-
-    self.assertResourceCalled('Directory',
-                              '/usr/lib/ambari-logsearch-logfeeder/conf',
-                              mode = 0755,
-                              cd_access = 'a',
-                              create_parents = True)
-
-    package_dirs = conf_select.get_package_dirs();
-    for package, dir_defs in package_dirs.iteritems():
-      for dir_def in dir_defs:
-        conf_dir = dir_def['conf_dir']
-        conf_backup_dir = conf_dir + ".backup"
-        current_dir = dir_def['current_dir']
-        self.assertResourceCalled('Execute', ('cp', '-R', '-p', conf_dir, conf_backup_dir),
-            not_if = 'test -e ' + conf_backup_dir,
-            sudo = True,)
-
-        self.assertResourceCalled('Directory', conf_dir, action = ['delete'],)
-        self.assertResourceCalled('Link', conf_dir, to = current_dir,)
-
-    self.assertNoMoreResources()
-
-  @patch("os.path.isdir", new = MagicMock(return_value = True))
-  @patch("shared_initialization.load_version", new = MagicMock(return_value="2.3.0.0-1234"))
-  @patch("resource_management.libraries.functions.conf_select.create")
-  @patch("resource_management.libraries.functions.conf_select.select")
-  @patch("os.symlink")
-  @patch("shutil.rmtree")
-  def test_hook_default_conf_select_with_error(self, rmtree_mock, symlink_mock, conf_select_select_mock, conf_select_create_mock):
-
-    def mocked_conf_select(arg1, arg2, arg3, dry_run = False, ignore_errors = False):
-      if arg2 == "pig" and not dry_run:
-        if not ignore_errors:
-          raise Exception("whoops")
-        else:
-          return None
-      return "/etc/{0}/{1}/0".format(arg2, arg3)
-
-    conf_select_create_mock.side_effect = mocked_conf_select
-    conf_select_select_mock.side_effect = mocked_conf_select
-
-    config_file = self.get_src_folder() + "/test/python/stacks/2.0.6/configs/default.json"
-    with open(config_file, "r") as f:
-      json_content = json.load(f)
-
-    version = '2.3.0.0-1234'
-    json_content['commandParams']['version'] = version
-    json_content['clusterLevelParams']['stack_version'] = "2.3"
-
-    self.executeScript("after-INSTALL/scripts/hook.py",
-                       classname="AfterInstallHook",
-                       command="hook",
-                       stack_version = self.STACK_VERSION,
-                       target=RMFTestCase.TARGET_STACK_HOOKS,
-                       config_dict = json_content,
-                       config_overrides = self.CONFIG_OVERRIDES)
-
-
-    self.assertResourceCalled('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'hive-server2', '2.3.0.0-1234'),
-      sudo = True)
-
-    self.assertResourceCalled('XmlConfig', 'core-site.xml',
-      owner = 'hdfs',
-      group = 'hadoop',
-      conf_dir = "/usr/hdp/2.3.0.0-1234/hadoop/conf",
-      configurations = self.getConfig()['configurations']['core-site'],
-      configuration_attributes = self.getConfig()['configurationAttributes']['core-site'],
-      only_if="ls /usr/hdp/2.3.0.0-1234/hadoop/conf",
-      xml_include_file=None)
-
-    self.assertResourceCalled('Directory',
-                              '/usr/lib/ambari-logsearch-logfeeder/conf',
-                              mode = 0755,
-                              cd_access = 'a',
-                              create_parents = True)
-
-    package_dirs = conf_select.get_package_dirs();
-    for package, dir_defs in package_dirs.iteritems():
-      for dir_def in dir_defs:
-        conf_dir = dir_def['conf_dir']
-        conf_backup_dir = conf_dir + ".backup"
-        current_dir = dir_def['current_dir']
-        self.assertResourceCalled('Execute', ('cp', '-R', '-p', conf_dir, conf_backup_dir),
-            not_if = 'test -e ' + conf_backup_dir,
-            sudo = True,)
-
-        self.assertResourceCalled('Directory', conf_dir, action = ['delete'],)
-        self.assertResourceCalled('Link', conf_dir, to = current_dir,)
-
-    self.assertNoMoreResources()
-
 
   @patch("shared_initialization.load_version", new = MagicMock(return_value="2.3.0.0-1234"))
   @patch("resource_management.libraries.functions.conf_select.create")
@@ -240,68 +107,6 @@ class TestHookAfterInstall(RMFTestCase):
     self.assertResourceCalled('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'hive-server2', '2.3.0.0-1234'),
       sudo = True)
 
-  @patch("os.path.isdir", new = MagicMock(return_value = True))
-  @patch("shared_initialization.load_version", new = MagicMock(return_value="2.3.0.0-1234"))
-  @patch("resource_management.libraries.functions.conf_select.create")
-  @patch("resource_management.libraries.functions.conf_select.select")
-  @patch("os.symlink")
-  @patch("shutil.rmtree")
-  def test_hook_default_conf_select_suspended(self, rmtree_mock, symlink_mock, conf_select_select_mock, conf_select_create_mock):
-
-    def mocked_conf_select(arg1, arg2, arg3, dry_run = False):
-      return "/etc/{0}/{1}/0".format(arg2, arg3)
-
-    conf_select_create_mock.side_effect = mocked_conf_select
-
-    config_file = self.get_src_folder() + "/test/python/stacks/2.0.6/configs/default.json"
-    with open(config_file, "r") as f:
-      json_content = json.load(f)
-
-    version = '2.3.0.0-1234'
-    json_content['commandParams']['version'] = version
-    json_content['clusterLevelParams']['stack_version'] = "2.3"
-    json_content['roleParams']['upgrade_suspended'] = "true"
-
-    self.executeScript("after-INSTALL/scripts/hook.py",
-                       classname="AfterInstallHook",
-                       command="hook",
-                       stack_version = self.STACK_VERSION,
-                       target=RMFTestCase.TARGET_STACK_HOOKS,
-                       config_dict = json_content,
-                       config_overrides = self.CONFIG_OVERRIDES)
-
-    # same assertions as test_hook_default_conf_select, but skip hdp-select set all
-
-    self.assertResourceCalled('XmlConfig', 'core-site.xml',
-      owner = 'hdfs',
-      group = 'hadoop',
-      conf_dir = "/usr/hdp/2.3.0.0-1234/hadoop/conf",
-      configurations = self.getConfig()['configurations']['core-site'],
-      configuration_attributes = self.getConfig()['configurationAttributes']['core-site'],
-      only_if="ls /usr/hdp/2.3.0.0-1234/hadoop/conf",
-      xml_include_file=None)
-
-    self.assertResourceCalled('Directory',
-                              '/usr/lib/ambari-logsearch-logfeeder/conf',
-                              mode = 0755,
-                              cd_access = 'a',
-                              create_parents = True)
-
-    package_dirs = conf_select.get_package_dirs();
-    for package, dir_defs in package_dirs.iteritems():
-      for dir_def in dir_defs:
-        conf_dir = dir_def['conf_dir']
-        conf_backup_dir = conf_dir + ".backup"
-        current_dir = dir_def['current_dir']
-        self.assertResourceCalled('Execute', ('cp', '-R', '-p', conf_dir, conf_backup_dir),
-            not_if = 'test -e ' + conf_backup_dir,
-            sudo = True,)
-
-        self.assertResourceCalled('Directory', conf_dir, action = ['delete'],)
-        self.assertResourceCalled('Link', conf_dir, to = current_dir,)
-
-    self.assertNoMoreResources()
-
 
   @patch("resource_management.core.Logger.warning")
   @patch("shared_initialization.load_version", new = MagicMock(return_value="2.3.0.0-1234"))
diff --git a/ambari-server/src/test/python/stacks/2.2/common/test_conf_select.py b/ambari-server/src/test/python/stacks/2.2/common/test_conf_select.py
deleted file mode 100644
index a893d75..0000000
--- a/ambari-server/src/test/python/stacks/2.2/common/test_conf_select.py
+++ /dev/null
@@ -1,200 +0,0 @@
-'''
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-'''
-
-import pprint
-import os
-from mock.mock import patch, MagicMock
-from stacks.utils.RMFTestCase import *
-from resource_management.core.logger import Logger
-from resource_management.libraries.functions import conf_select
-from resource_management.libraries.script import Script
-
-class TestConfSelect(RMFTestCase):
-
-  def setUp(self):
-    Logger.initialize_logger()
-
-    # required for the test to run since the Execute calls need this
-    from resource_management.core.environment import Environment
-    self.env = Environment(test_mode=True)
-    self.env.__enter__()
-
-    Script.config = dict()
-    Script.config.update( { "configurations" : { "cluster-env" : {} }, "clusterLevelParams": {} } )
-    Script.config["configurations"]["cluster-env"]["stack_packages"] = RMFTestCase.get_stack_packages()
-    Script.config["clusterLevelParams"] = { "stack_name" : "HDP" }
-
-
-  def tearDown(self):
-    self.env.__exit__(None,None,None)
-
-
-  @patch("resource_management.libraries.functions.conf_select._valid", new = MagicMock(return_value=True))
-  def test_select_throws_error(self):
-    """
-    Tests that conf-select throws errors correctly
-    :return:
-    """
-    try:
-      conf_select.select("foo", "bar", "version", ignore_errors = False)
-      self.fail("Expected an error from conf-select")
-    except:
-      pass
-
-    conf_select.select("foo", "bar", "version", ignore_errors = True)
-
-
-  @patch("resource_management.core.shell.call")
-  @patch("resource_management.libraries.functions.conf_select._valid", new = MagicMock(return_value=True))
-  @patch("resource_management.libraries.functions.stack_tools.get_stack_tool_path", new = MagicMock(return_value="/usr/bin/conf-select"))
-  def test_create_seeds_configuration_directories(self, shell_call_mock):
-    """
-    Tests that conf-select seeds new directories
-    :return:
-    """
-
-    def mock_call(command, **kwargs):
-      """
-      Stand-in for shell.call that returns a canned response instead of running the command.
-      :param command: Command that would have been executed.
-      :return: Returns a tuple of (return code, stdout, stderr)
-      """
-      return (0, "/etc/foo/conf", None)
-
-    shell_call_mock.side_effect = mock_call
-    conf_select.create("HDP", "oozie", "version")
-
-    self.assertEqual(pprint.pformat(self.env.resource_list),
-      "[Directory['/etc/foo/conf'],\n "
-      "Execute['ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E cp -R -p -v /usr/hdp/current/oozie-client/conf/* /etc/foo/conf']]")
-
-
-  def test_symlink_conversion_bad_linkto(self):
-    """
-    Tests that a bad enum throws an exception.
-    :return:
-    """
-    packages = conf_select.get_package_dirs()
-
-    try:
-      conf_select.convert_conf_directories_to_symlinks("hadoop", "2.3.0.0-1234",
-        packages["hadoop"], link_to = "INVALID")
-      raise Exception("Expected failure when supplying a bad enum for link_to")
-    except:
-      pass
-
-
-  @patch("resource_management.core.shell.call")
-  @patch.object(os.path, "isdir")
-  @patch.object(os.path, "exists")
-  @patch.object(os.path, "islink")
-  @patch("resource_management.libraries.functions.conf_select._valid", new = MagicMock(return_value = True))
-  @patch("resource_management.libraries.functions.conf_select.create", new = MagicMock(return_value = ["/etc/hadoop/2.3.0.0-1234/0"]))
-  @patch("resource_management.libraries.functions.conf_select.select", new = MagicMock())
-  def test_symlink_conversion_to_current(self, islink_mock, path_mock, isdir_mock, shell_call_mock):
-    """
-    Tests that conf-select creates the correct symlink directories.
-    :return:
-    """
-
-    def mock_call(command, **kwargs):
-      """
-      Stand-in for shell.call that returns a canned response instead of running the command.
-      :param command: Command that would have been executed.
-      :return: Returns a tuple of (return code, stdout, stderr)
-      """
-      return (0, "/etc/hadoop/conf", None)
-
-    def path_mock_call(path):
-      if path == "/etc/hadoop/conf":
-        return True
-
-      if path == "/etc/hadoop/2.3.0.0-1234/0":
-        return True
-
-      return False
-
-    def islink_mock_call(path):
-      if path == "/etc/hadoop/conf":
-        return False
-
-      return False
-
-    def isdir_mock_call(path):
-      if path == "/etc/hadoop/conf":
-        return True
-
-      return False
-
-
-    packages = conf_select.get_package_dirs()
-
-    path_mock.side_effect = path_mock_call
-    islink_mock.side_effect = islink_mock_call
-    shell_call_mock.side_effect = mock_call
-    conf_select.convert_conf_directories_to_symlinks("hadoop", "2.3.0.0-1234", packages["hadoop"])
-
-    self.assertEqual(pprint.pformat(self.env.resource_list[0]), "Execute[('cp', '-R', '-p', u'/etc/hadoop/conf', u'/etc/hadoop/conf.backup')]")
-    self.assertEqual(pprint.pformat(self.env.resource_list[1]), "Directory['/etc/hadoop/conf']")
-    self.assertEqual(pprint.pformat(self.env.resource_list[2]), "Link['/etc/hadoop/conf']")
-
-
-  @patch.object(os.path, "exists", new = MagicMock(return_value = True))
-  @patch.object(os.path, "islink", new = MagicMock(return_value = True))
-  @patch.object(os, "readlink", new = MagicMock(return_value = "/etc/component/invalid"))
-  @patch("resource_management.libraries.functions.conf_select._valid", new = MagicMock(return_value = True))
-  @patch("resource_management.libraries.functions.conf_select.create", new = MagicMock(return_value = ["/etc/hadoop/2.3.0.0-1234/0"]))
-  @patch("resource_management.libraries.functions.conf_select.select", new = MagicMock())
-  def test_symlink_conversion_relinks_wrong_link(self):
-    """
-    Tests that conf-select symlinking can detect a wrong directory
-    :return:
-    """
-    packages = conf_select.get_package_dirs()
-
-    conf_select.convert_conf_directories_to_symlinks("hadoop", "2.3.0.0-1234",
-      packages["hadoop"])
-
-    self.assertEqual(pprint.pformat(self.env.resource_list),
-      "[Link['/etc/hadoop/conf'], Link['/etc/hadoop/conf']]")
-
-
-  @patch.object(os.path, "exists", new = MagicMock(return_value = False))
-  @patch("resource_management.libraries.functions.conf_select._valid", new = MagicMock(return_value = True))
-  def test_symlink_noop(self):
-    """
-    Tests that conf-select symlinking does nothing if the directory doesn't exist
-    :return:
-    """
-    packages = conf_select.get_package_dirs()
-
-    conf_select.convert_conf_directories_to_symlinks("hadoop", "2.3.0.0-1234", packages["hadoop"])
-
-    self.assertEqual(pprint.pformat(self.env.resource_list), "[]")
-
-
-  def test_restrictions(self):
-
-    Script.config.update({'roleParameters': {'cluster_version_summary': {'services': {'HIVE': {'upgrade': True}}}}})
-
-    restricted = conf_select.get_restricted_packages()
-    self.assertTrue("hive" in restricted)
-    self.assertTrue("hive-hcatalog" in restricted)
-    self.assertTrue("hive2" in restricted)
-    self.assertTrue("tez_hive2" in restricted)
-    self.assertTrue("hadoop" not in restricted)