Posted to commits@ambari.apache.org by jo...@apache.org on 2016/03/15 15:19:50 UTC

ambari git commit: AMBARI-15419 - After EU Some Services Fail To Start Because of Missing JKS Files (jonathanhurley)

Repository: ambari
Updated Branches:
  refs/heads/trunk bc2719a77 -> 1816cfb99


AMBARI-15419 - After EU Some Services Fail To Start Because of Missing JKS Files (jonathanhurley)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/1816cfb9
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/1816cfb9
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/1816cfb9

Branch: refs/heads/trunk
Commit: 1816cfb998f41de649d97cbf5ef45a1d3429d37f
Parents: bc2719a
Author: Jonathan Hurley <jh...@hortonworks.com>
Authored: Mon Mar 14 23:38:21 2016 -0400
Committer: Jonathan Hurley <jh...@hortonworks.com>
Committed: Tue Mar 15 10:14:02 2016 -0400

----------------------------------------------------------------------
 .../libraries/functions/conf_select.py          | 172 +++++++++++++------
 .../custom_actions/scripts/install_packages.py  |   3 +-
 .../hooks/after-INSTALL/test_after_install.py   |  55 +++++-
 .../stacks/2.2/common/test_conf_select.py       |  72 ++++++++
 4 files changed, 248 insertions(+), 54 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/1816cfb9/ambari-common/src/main/python/resource_management/libraries/functions/conf_select.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/conf_select.py b/ambari-common/src/main/python/resource_management/libraries/functions/conf_select.py
index 59c717b..b970353 100644
--- a/ambari-common/src/main/python/resource_management/libraries/functions/conf_select.py
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/conf_select.py
@@ -204,6 +204,7 @@ def create(stack_name, package, version, dry_run = False):
   :param stack_name: the name of the stack
   :param package: the name of the package, as-used by conf-select
   :param version: the version number to create
+  :param dry_run: False to create the versioned config directory, True to only return what would be created
   :return List of directories created
   """
   Logger.info("Checking if need to create versioned conf dir /etc/{0}/{1}/0".format(package, version))
@@ -217,62 +218,75 @@ def create(stack_name, package, version, dry_run = False):
 
   # conf-select can set more than one directory
   # per package, so return that list, especially for dry_run
-  dirs = []
+  # > conf-select dry-run-create --package hive-hcatalog --stack-version 2.4.0.0-169 0
+  # /etc/hive-webhcat/2.4.0.0-169/0
+  # /etc/hive-hcatalog/2.4.0.0-169/0
+  created_directories = []
   if 0 == code and stdout is not None: # just be sure we have a stdout
     for line in stdout.splitlines():
-      dirs.append(line.rstrip('\n'))
+      created_directories.append(line.rstrip('\n'))
 
-  # take care of permissions
-  if not code and stdout and command == "create-conf-dir":
-    for d in dirs:
-      Directory(d,
-          mode=0755,
-          cd_access='a',
-          create_parents=True)
+  # if directories were created, then do some post-processing
+  if not code and stdout and not dry_run:
+    # take care of permissions if directories were created
+    for directory in created_directories:
+      Directory(directory, mode=0755, cd_access='a', create_parents=True)
 
-  return dirs
+    # seed the new directories with configurations from the old (current) directories
+    _seed_new_configuration_directories(package, created_directories)
 
+  return created_directories
 
-def select(stack_name, package, version, try_create=True):
+
+def select(stack_name, package, version, try_create=True, ignore_errors=False):
   """
   Selects a config version for the specified package.
-  :stack_name: the name of the stack
-  :package: the name of the package, as-used by conf-select
-  :version: the version number to create
-  :try_create: optional argument to attempt to create the directory before setting it
+  :param stack_name: the name of the stack
+  :param package: the name of the package, as-used by conf-select
+  :param version: the version number to create
+  :param try_create: optional argument to attempt to create the directory before setting it
+  :param ignore_errors: optional argument to ignore any error and simply log a warning
   """
-  if not _valid(stack_name, package, version):
-    return
+  try:
+    if not _valid(stack_name, package, version):
+      return
 
-  if try_create:
-    create(stack_name, package, version)
+    if try_create:
+      create(stack_name, package, version)
 
-  shell.checked_call(get_cmd("set-conf-dir", package, version), logoutput=False, quiet=False, sudo=True)
+    shell.checked_call(get_cmd("set-conf-dir", package, version), logoutput=False, quiet=False, sudo=True)
 
-  # for consistency sake, we must ensure that the /etc/<component>/conf symlink exists and
-  # points to /usr/hdp/current/<component>/conf - this is because some people still prefer to
-  # use /etc/<component>/conf even though /usr/hdp is the "future"
-  if package in PACKAGE_DIRS:
-    Logger.info("Ensuring that {0} has the correct symlink structure".format(package))
+    # for consistency sake, we must ensure that the /etc/<component>/conf symlink exists and
+    # points to /usr/hdp/current/<component>/conf - this is because some people still prefer to
+    # use /etc/<component>/conf even though /usr/hdp is the "future"
+    if package in PACKAGE_DIRS:
+      Logger.info("Ensuring that {0} has the correct symlink structure".format(package))
 
-    directory_list = PACKAGE_DIRS[package]
-    for directory_structure in directory_list:
-      conf_dir = directory_structure["conf_dir"]
-      current_dir = directory_structure["current_dir"]
+      directory_list = PACKAGE_DIRS[package]
+      for directory_structure in directory_list:
+        conf_dir = directory_structure["conf_dir"]
+        current_dir = directory_structure["current_dir"]
 
-      # if /etc/<component>/conf is not a symlink, we need to change it
-      if not os.path.islink(conf_dir):
-        # if it exists, try to back it up
-        if os.path.exists(conf_dir):
-          parent_directory = os.path.dirname(conf_dir)
-          conf_install_dir = os.path.join(parent_directory, "conf.backup")
+        # if /etc/<component>/conf is not a symlink, we need to change it
+        if not os.path.islink(conf_dir):
+          # if it exists, try to back it up
+          if os.path.exists(conf_dir):
+            parent_directory = os.path.dirname(conf_dir)
+            conf_install_dir = os.path.join(parent_directory, "conf.backup")
 
-          Execute(("cp", "-R", "-p", conf_dir, conf_install_dir),
-            not_if = format("test -e {conf_install_dir}"), sudo = True)
+            Execute(("cp", "-R", "-p", conf_dir, conf_install_dir),
+              not_if = format("test -e {conf_install_dir}"), sudo = True)
 
-          Directory(conf_dir, action="delete")
+            Directory(conf_dir, action="delete")
+
+          Link(conf_dir, to = current_dir)
+  except Exception, exception:
+    if ignore_errors is True:
+      Logger.warning("Could not select the directory for package {0}. Error: {1}".format(package,
+        str(exception)))
+    else:
+      raise
 
-        Link(conf_dir, to = current_dir)
 
 
 def get_hadoop_conf_dir(force_latest_on_upgrade=False):
@@ -376,7 +390,6 @@ def get_hadoop_conf_dir(force_latest_on_upgrade=False):
 
 
 def convert_conf_directories_to_symlinks(package, version, dirs, skip_existing_links=True, link_to="current"):
-
   """
   Assumes HDP 2.3+, moves around directories and creates the conf symlink for the given package.
   If the package does not exist, then no work is performed.
@@ -384,11 +397,11 @@ def convert_conf_directories_to_symlinks(package, version, dirs, skip_existing_l
   - Creates a /etc/<component>/conf.backup directory
   - Copies all configs from /etc/<component>/conf to conf.backup
   - Removes /etc/<component>/conf
-  - Creates /etc/<component>/<version>/0
-  - Creates /usr/hdp/current/<component>-client/conf -> /etc/<component>/<version>/0
-  - Links /etc/<component>/conf to <something>
-  -- /etc/<component>/conf -> /usr/hdp/current/[component]-client/conf
-  -- /etc/<component>/conf -> /etc/<component>/conf.backup
+  - Creates /etc/<component>/<version>/0 via conf-select
+  - /usr/hdp/current/<component>-client/conf -> /etc/<component>/<version>/0 via conf-select
+  - Links /etc/<component>/conf to <something> depending on the function parameter
+  -- /etc/<component>/conf -> /usr/hdp/current/[component]-client/conf (usually)
+  -- /etc/<component>/conf -> /etc/<component>/conf.backup (only when supporting < HDP 2.3)
 
   :param package: the package to create symlinks for (zookeeper, falcon, etc)
   :param version: the version number to use with conf-select (2.3.0.0-1234)
@@ -461,7 +474,7 @@ def convert_conf_directories_to_symlinks(package, version, dirs, skip_existing_l
 
  # /usr/hdp/current/[component] is already set to the correct version, e.g., /usr/hdp/[version]/[component]
   
-  link_component_conf_to_versioned_config(package, version)
+  select("HDP", package, version, ignore_errors = True)
 
   # Symlink /etc/[component]/conf to /etc/[component]/conf.backup
   try:
@@ -486,11 +499,70 @@ def convert_conf_directories_to_symlinks(package, version, dirs, skip_existing_l
     Logger.warning("Could not change symlink for package {0} to point to {1} directory. Error: {2}".format(package, link_to, e))
 
 
-def link_component_conf_to_versioned_config(package, version):
+def _seed_new_configuration_directories(package, created_directories):
   """
-  Make /usr/hdp/[version]/[component]/conf point to the versioned config.
+  Copies any files from the "current" configuration directory to the directories which were
+  newly created with conf-select. This function helps ensure that files which are not tracked
+  by Ambari will be available after performing a stack upgrade. Although old configurations
+  will be copied as well, they will be overwritten when the components are writing out their
+  configs after upgrade during their restart.
+
+  This function will catch all errors, logging them, but not raising an exception. This is to
+  prevent problems here from stopping an otherwise healthy upgrade.
+
+  :param package: the conf-select package name
+  :param created_directories: a list of directories that conf-select said it created
+  :return: None
   """
+  if package not in PACKAGE_DIRS:
+    Logger.warning("Unable to seed newly created configuration directories for {0} because it is an unknown component".format(package))
+    return
+
+  # seed the directories with any existing configurations
+  # this allows files which are not tracked by Ambari to be available after an upgrade
+  Logger.info("Seeding versioned configuration directories for {0}".format(package))
+  expected_directories = PACKAGE_DIRS[package]
+
   try:
-    select("HDP", package, version)
+    # if the expected directories don't match those created, we can't seed them
+    if len(created_directories) != len(expected_directories):
+      Logger.warning("The known configuration directories for {0} do not match those created by conf-select: {1}".format(
+        package, str(created_directories)))
+
+      return
+
+    # short circuit for a simple 1:1 mapping
+    if len(expected_directories) == 1:
+      # /usr/hdp/current/component/conf
+      # the current directory is the source of the seeded configurations;
+      source_seed_directory = expected_directories[0]["current_dir"]
+      target_seed_directory = created_directories[0]
+      _copy_configurations(source_seed_directory, target_seed_directory)
+    else:
+      for created_directory in created_directories:
+        for expected_directory_structure in expected_directories:
+          prefix = expected_directory_structure.get("prefix", None)
+          if prefix is not None and created_directory.startswith(prefix):
+            source_seed_directory = expected_directory_structure["current_dir"]
+            target_seed_directory = created_directory
+            _copy_configurations(source_seed_directory, target_seed_directory)
+
   except Exception, e:
-    Logger.warning("Could not select the directory for package {0}. Error: {1}".format(package, e))
+    Logger.warning("Unable to seed new configuration directories for {0}. {1}".format(package, str(e)))
+
+
+def _copy_configurations(source_directory, target_directory):
+  """
+  Copies from the source directory to the target directory. If the source directory is a symlink
+  then it will be followed (dereferenced), but any other symlinks found to copy will not be. This
+  will ensure that if the configuration directory itself is a symlink, then its contents will be
+  copied, preserving any children found which are also symlinks.
+
+  :param source_directory:  the source directory to copy from
+  :param target_directory:  the target directory to copy to
+  :return: None
+  """
+  # append trailing slash so the cp command works correctly WRT recursion and symlinks
+  source_directory = os.path.join(source_directory, "*")
+  Execute(as_sudo(["cp", "-R", "-p", "-v", source_directory, target_directory], auto_escape = False),
+    logoutput = True)
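
For readers following the conf_select.py changes above outside of Ambari's resource_management framework, here is a minimal, hypothetical sketch of what the new create-and-seed flow amounts to: ask conf-select (dry run) which versioned directories it would create, create them, then copy the contents of the live /usr/hdp/current/<component>/conf directory into each one so that files Ambari does not manage (such as the missing JKS files this bug is about) survive the upgrade. The conf-select invocations, the current_conf_dir argument, and the error handling below are simplified assumptions; the real code builds its commands with get_cmd() and uses Directory, Execute, and PACKAGE_DIRS as shown in the diff.

    # Hypothetical standalone sketch of the create-and-seed flow (not Ambari code).
    import subprocess

    def create_and_seed(package, version, current_conf_dir):
        # Ask conf-select what it would create, without touching the filesystem.
        # Example output, one directory per line (taken from the comment above):
        #   /etc/hive-webhcat/2.4.0.0-169/0
        #   /etc/hive-hcatalog/2.4.0.0-169/0
        dry_run_output = subprocess.check_output(
            ["conf-select", "dry-run-create",
             "--package", package, "--stack-version", version, "0"]).decode()
        created = [line for line in dry_run_output.splitlines() if line.strip()]

        # Actually create the versioned configuration directories
        # (assumed to take the same flags as dry-run-create; the real code
        # builds this command with get_cmd()).
        subprocess.check_call(
            ["conf-select", "create-conf-dir",
             "--package", package, "--stack-version", version, "0"])

        # Seed each new directory from the "current" conf directory so that
        # untracked files (e.g. JKS keystores) are still present after upgrade.
        for target in created:
            # "source/." follows the top-level conf symlink while -p preserves
            # permissions and child symlinks, approximating the
            # `cp -R -p -v source/*` call in _copy_configurations above.
            subprocess.check_call(
                ["cp", "-R", "-p", current_conf_dir.rstrip("/") + "/.", target])

        return created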

http://git-wip-us.apache.org/repos/asf/ambari/blob/1816cfb9/ambari-server/src/main/resources/custom_actions/scripts/install_packages.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/custom_actions/scripts/install_packages.py b/ambari-server/src/main/resources/custom_actions/scripts/install_packages.py
index 08bdcc3..e0f3752 100644
--- a/ambari-server/src/main/resources/custom_actions/scripts/install_packages.py
+++ b/ambari-server/src/main/resources/custom_actions/scripts/install_packages.py
@@ -199,9 +199,10 @@ class InstallPackages(Script):
         Logger.info("The current cluster stack of {0} does not require backing up configurations; "
                     "only conf-select versioned config directories will be created.".format(stack_version))
         # only link configs for all known packages
-        conf_select.link_component_conf_to_versioned_config(package_name, stack_version)
+        conf_select.select("HDP", package_name, stack_version, ignore_errors = True)
       else:
         # link configs and create conf.backup folders for all known packages
+        # this will also call conf-select select
         conf_select.convert_conf_directories_to_symlinks(package_name, stack_version, directories,
           skip_existing_links = False, link_to = "backup")
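
The switch to conf_select.select(..., ignore_errors = True) above changes the failure mode of this custom action: a package whose configuration cannot be selected is now logged as a warning instead of aborting the whole install_packages run. A generic sketch of that contract, using a hypothetical wrapper rather than Ambari code:

    import logging

    logger = logging.getLogger(__name__)

    def select_conf(select_fn, stack, package, version, ignore_errors=False):
        # With ignore_errors=True a failure for one package is logged and
        # swallowed so the remaining packages are still processed.
        try:
            select_fn(stack, package, version)
        except Exception as exception:
            if ignore_errors:
                logger.warning("Could not select the directory for package %s. Error: %s",
                               package, exception)
            else:
                raise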
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/1816cfb9/ambari-server/src/test/python/stacks/2.0.6/hooks/after-INSTALL/test_after_install.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/hooks/after-INSTALL/test_after_install.py b/ambari-server/src/test/python/stacks/2.0.6/hooks/after-INSTALL/test_after_install.py
index daee726..749e73b 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/hooks/after-INSTALL/test_after_install.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/hooks/after-INSTALL/test_after_install.py
@@ -283,13 +283,15 @@ class TestHookAfterInstall(RMFTestCase):
   @patch("shutil.rmtree")
   def test_hook_default_conf_select_with_error(self, rmtree_mock, symlink_mock, conf_select_select_mock, conf_select_create_mock):
 
-    def mocked_conf_select(arg1, arg2, arg3, dry_run = False):
+    def mocked_conf_select(arg1, arg2, arg3, dry_run = False, ignore_errors = False):
       if arg2 == "pig" and not dry_run:
-        raise Exception("whoops")
+        if not ignore_errors:
+          raise Exception("whoops")
+        else:
+          return None
       return "/etc/{0}/{1}/0".format(arg2, arg3)
 
     conf_select_create_mock.side_effect = mocked_conf_select
-
     conf_select_select_mock.side_effect = mocked_conf_select
 
     config_file = self.get_src_folder() + "/test/python/stacks/2.0.6/configs/default.json"
@@ -320,168 +322,209 @@ class TestHookAfterInstall(RMFTestCase):
     self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/ranger/kms/conf', '/etc/ranger/kms/conf.backup'),
         not_if = 'test -e /etc/ranger/kms/conf.backup',
         sudo = True,)
+
     self.assertResourceCalled('Directory', '/etc/ranger/kms/conf',
         action = ['delete'],)
+
     self.assertResourceCalled('Link', '/etc/ranger/kms/conf',
         to = '/usr/hdp/current/ranger-kms/conf',)
 
     self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/zookeeper/conf', '/etc/zookeeper/conf.backup'),
         not_if = 'test -e /etc/zookeeper/conf.backup',
         sudo = True)
+
     self.assertResourceCalled('Directory', '/etc/zookeeper/conf',
         action = ['delete'])
+
     self.assertResourceCalled('Link', '/etc/zookeeper/conf',
         to = '/usr/hdp/current/zookeeper-client/conf')
 
     self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/pig/conf', '/etc/pig/conf.backup'),
         not_if = 'test -e /etc/pig/conf.backup',
         sudo = True,)
+
     self.assertResourceCalled('Directory', '/etc/pig/conf',
                               action=['delete'])
+
     self.assertResourceCalled("Link", "/etc/pig/conf",
                               to="/usr/hdp/current/pig-client/conf")
+
     # pig fails, so no Directory/Link combo
 
     self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/tez/conf', '/etc/tez/conf.backup'),
         not_if = 'test -e /etc/tez/conf.backup',
         sudo = True)
+
     self.assertResourceCalled('Directory', '/etc/tez/conf',
         action = ['delete'])
+
     self.assertResourceCalled('Link', '/etc/tez/conf',
         to = '/usr/hdp/current/tez-client/conf')
 
     self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/hive-webhcat/conf', '/etc/hive-webhcat/conf.backup'),
         not_if = 'test -e /etc/hive-webhcat/conf.backup',
         sudo = True,)
+
     self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/hive-hcatalog/conf', '/etc/hive-hcatalog/conf.backup'),
         not_if = 'test -e /etc/hive-hcatalog/conf.backup',
         sudo = True,)
 
     self.assertResourceCalled('Directory', '/etc/hive-webhcat/conf',
         action = ['delete'],)
+
     self.assertResourceCalled('Link', '/etc/hive-webhcat/conf',
         to = '/usr/hdp/current/hive-webhcat/etc/webhcat',)
 
     self.assertResourceCalled('Directory', '/etc/hive-hcatalog/conf',
         action = ['delete'],)
+
     self.assertResourceCalled('Link', '/etc/hive-hcatalog/conf',
         to = '/usr/hdp/current/hive-webhcat/etc/hcatalog',)
 
     self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/hbase/conf', '/etc/hbase/conf.backup'),
         not_if = 'test -e /etc/hbase/conf.backup',
         sudo = True)
+
     self.assertResourceCalled('Directory', '/etc/hbase/conf',
         action = ['delete'])
+
     self.assertResourceCalled('Link', '/etc/hbase/conf',
         to = '/usr/hdp/current/hbase-client/conf')
 
     self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/knox/conf', '/etc/knox/conf.backup'),
         not_if = 'test -e /etc/knox/conf.backup',
         sudo = True)
+
     self.assertResourceCalled('Directory', '/etc/knox/conf',
         action = ['delete'])
+
     self.assertResourceCalled('Link', '/etc/knox/conf',
         to = '/usr/hdp/current/knox-server/conf')
 
     self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/ranger/usersync/conf', '/etc/ranger/usersync/conf.backup'),
         not_if = 'test -e /etc/ranger/usersync/conf.backup',
         sudo = True)
+
     self.assertResourceCalled('Directory', '/etc/ranger/usersync/conf',
         action = ['delete'])
+
     self.assertResourceCalled('Link', '/etc/ranger/usersync/conf',
         to = '/usr/hdp/current/ranger-usersync/conf')
 
     self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/hadoop/conf', '/etc/hadoop/conf.backup'),
         not_if = 'test -e /etc/hadoop/conf.backup',
         sudo = True)
+
     self.assertResourceCalled('Directory', '/etc/hadoop/conf',
         action = ['delete'])
+
     self.assertResourceCalled('Link', '/etc/hadoop/conf',
         to = '/usr/hdp/current/hadoop-client/conf')
 
     self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/mahout/conf', '/etc/mahout/conf.backup'),
         not_if = 'test -e /etc/mahout/conf.backup',
         sudo = True)
+
     self.assertResourceCalled('Directory', '/etc/mahout/conf',
         action = ['delete'])
+
     self.assertResourceCalled('Link', '/etc/mahout/conf',
         to = '/usr/hdp/current/mahout-client/conf')
 
     self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/storm/conf', '/etc/storm/conf.backup'),
         not_if = 'test -e /etc/storm/conf.backup',
         sudo = True)
+
     self.assertResourceCalled('Directory', '/etc/storm/conf',
         action = ['delete'])
+
     self.assertResourceCalled('Link', '/etc/storm/conf',
         to = '/usr/hdp/current/storm-client/conf')
 
     self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/ranger/admin/conf', '/etc/ranger/admin/conf.backup'),
         not_if = 'test -e /etc/ranger/admin/conf.backup',
         sudo = True)
+
     self.assertResourceCalled('Directory', '/etc/ranger/admin/conf',
         action = ['delete'])
+
     self.assertResourceCalled('Link', '/etc/ranger/admin/conf',
         to = '/usr/hdp/current/ranger-admin/conf')
 
     self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/flume/conf', '/etc/flume/conf.backup'),
         not_if = 'test -e /etc/flume/conf.backup',
         sudo = True,)
+
     self.assertResourceCalled('Directory', '/etc/flume/conf',
         action = ['delete'],)
+
     self.assertResourceCalled('Link', '/etc/flume/conf',
         to = '/usr/hdp/current/flume-server/conf',)
 
     self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/sqoop/conf', '/etc/sqoop/conf.backup'),
         not_if = 'test -e /etc/sqoop/conf.backup',
         sudo = True)
+
     self.assertResourceCalled('Directory', '/etc/sqoop/conf',
         action = ['delete'])
+
     self.assertResourceCalled('Link', '/etc/sqoop/conf',
         to = '/usr/hdp/current/sqoop-client/conf')
 
     self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/accumulo/conf', '/etc/accumulo/conf.backup'),
         not_if = 'test -e /etc/accumulo/conf.backup',
         sudo = True,)
+
     self.assertResourceCalled('Directory', '/etc/accumulo/conf',
         action = ['delete'],)
+
     self.assertResourceCalled('Link', '/etc/accumulo/conf',
         to = '/usr/hdp/current/accumulo-client/conf',)
 
     self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/phoenix/conf', '/etc/phoenix/conf.backup'),
         not_if = 'test -e /etc/phoenix/conf.backup',
         sudo = True)
+
     self.assertResourceCalled('Directory', '/etc/phoenix/conf',
         action = ['delete'])
+
     self.assertResourceCalled('Link', '/etc/phoenix/conf',
         to = '/usr/hdp/current/phoenix-client/conf')
 
     self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/storm-slider-client/conf', '/etc/storm-slider-client/conf.backup'),
         not_if = 'test -e /etc/storm-slider-client/conf.backup',
         sudo = True)
+
     self.assertResourceCalled('Directory', '/etc/storm-slider-client/conf',
         action = ['delete'])
+
     self.assertResourceCalled('Link', '/etc/storm-slider-client/conf',
         to = '/usr/hdp/current/storm-slider-client/conf')
 
     self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/slider/conf', '/etc/slider/conf.backup'),
         not_if = 'test -e /etc/slider/conf.backup',
         sudo = True)
+
     self.assertResourceCalled('Directory', '/etc/slider/conf',
         action = ['delete'])
+
     self.assertResourceCalled('Link', '/etc/slider/conf',
         to = '/usr/hdp/current/slider-client/conf')
 
     self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/oozie/conf', '/etc/oozie/conf.backup'),
         not_if = 'test -e /etc/oozie/conf.backup',
         sudo = True)
+
     self.assertResourceCalled('Directory', '/etc/oozie/conf',
         action = ['delete'])
+
     self.assertResourceCalled('Link', '/etc/oozie/conf',
         to = '/usr/hdp/current/oozie-client/conf')
 
     self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/falcon/conf', '/etc/falcon/conf.backup'),
         not_if = 'test -e /etc/falcon/conf.backup',
         sudo = True)
+
     self.assertResourceCalled('Directory', '/etc/falcon/conf',
         action = ['delete'])
     self.assertResourceCalled('Link', '/etc/falcon/conf',
@@ -490,24 +533,30 @@ class TestHookAfterInstall(RMFTestCase):
     self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/spark/conf', '/etc/spark/conf.backup'),
         not_if = 'test -e /etc/spark/conf.backup',
         sudo = True)
+
     self.assertResourceCalled('Directory', '/etc/spark/conf',
         action = ['delete'])
+
     self.assertResourceCalled('Link', '/etc/spark/conf',
         to = '/usr/hdp/current/spark-client/conf')
 
     self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/kafka/conf', '/etc/kafka/conf.backup'),
         not_if = 'test -e /etc/kafka/conf.backup',
         sudo = True)
+
     self.assertResourceCalled('Directory', '/etc/kafka/conf',
         action = ['delete'])
+
     self.assertResourceCalled('Link', '/etc/kafka/conf',
         to = '/usr/hdp/current/kafka-broker/conf')
 
     self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/hive/conf', '/etc/hive/conf.backup'),
         not_if = 'test -e /etc/hive/conf.backup',
         sudo = True)
+
     self.assertResourceCalled('Directory', '/etc/hive/conf',
         action = ['delete'])
+
     self.assertResourceCalled('Link', '/etc/hive/conf',
         to = '/usr/hdp/current/hive-client/conf')
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/1816cfb9/ambari-server/src/test/python/stacks/2.2/common/test_conf_select.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.2/common/test_conf_select.py b/ambari-server/src/test/python/stacks/2.2/common/test_conf_select.py
new file mode 100644
index 0000000..20f00e8
--- /dev/null
+++ b/ambari-server/src/test/python/stacks/2.2/common/test_conf_select.py
@@ -0,0 +1,72 @@
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+
+import pprint
+from mock.mock import patch, MagicMock
+from stacks.utils.RMFTestCase import *
+from resource_management.core.logger import Logger
+from resource_management.libraries.functions import conf_select
+
+class TestConfSelect(RMFTestCase):
+
+  def setUp(self):
+    Logger.initialize_logger()
+
+    # required for the test to run since the Execute calls need this
+    from resource_management.core.environment import Environment
+    self.env = Environment(test_mode=True)
+    self.env._instances.append(self.env)
+
+
+  @patch("resource_management.libraries.functions.conf_select._valid", new = MagicMock(return_value=True))
+  def test_select_throws_error(self):
+    """
+    Tests that conf-select throws errors correctly
+    :return:
+    """
+    try:
+      conf_select.select("foo", "bar", "version", ignore_errors = False)
+      self.fail("Expected an error from conf-select")
+    except:
+      pass
+
+    conf_select.select("foo", "bar", "version", ignore_errors = True)
+
+
+  @patch("resource_management.core.shell.call")
+  @patch("resource_management.libraries.functions.conf_select._valid", new = MagicMock(return_value=True))
+  def test_create_seeds_configuration_directories(self, shell_call_mock):
+    """
+    Tests that conf-select seeds new directories
+    :return:
+    """
+
+    def mock_call(command, **kwargs):
+      """
+      Instead of shell.call, return a canned conf-select result.
+      :param command: the command that would have been executed (ignored here)
+      :return: Returns a tuple of (return code, stdout, stderr)
+      """
+      return (0, "/etc/foo/conf", None)
+
+    shell_call_mock.side_effect = mock_call
+    conf_select.create("HDP", "oozie", "version")
+
+    self.assertEqual(pprint.pformat(self.env.resource_list),
+      "[Directory['/etc/foo/conf'],\n "
+      "Execute['ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E cp -R -p -v /usr/hdp/current/oozie-client/conf/* /etc/foo/conf']]")
\ No newline at end of file
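
The second test above works by patching resource_management.core.shell.call so that conf-select appears to have created /etc/foo/conf, and then asserting on the Directory and Execute resources recorded in the test Environment. For reference, a stripped-down illustration of the same side_effect technique with the standard mock library, using a hypothetical helper instead of the RMFTestCase harness:

    import subprocess

    try:
        from unittest.mock import patch  # Python 3
    except ImportError:
        from mock import patch           # Python 2

    def list_created_conf_dirs(package, version):
        # Simplified stand-in for conf_select.create(dry_run=True): return the
        # directories the (mocked) conf-select command reports on stdout.
        output = subprocess.check_output(
            ["conf-select", "dry-run-create",
             "--package", package, "--stack-version", version, "0"]).decode()
        return [line for line in output.splitlines() if line]

    def test_create_parses_conf_select_output():
        def fake_conf_select(command, **kwargs):
            # Like mock_call above: ignore the command and pretend conf-select
            # printed a single directory.
            return b"/etc/foo/conf\n"

        with patch("subprocess.check_output", side_effect=fake_conf_select):
            assert list_created_conf_dirs("oozie", "2.4.0.0-169") == ["/etc/foo/conf"]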