Posted to commits@ambari.apache.org by jl...@apache.org on 2016/06/17 14:48:41 UTC

ambari git commit: AMBARI-16920: Follow up issue for Spark2 stack definition (Jeff Zhang via jluniya)

Repository: ambari
Updated Branches:
  refs/heads/trunk b6cb758b5 -> a6e2d9203


AMBARI-16920: Follow up issue for Spark2 stack definition (Jeff Zhang via jluniya)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/a6e2d920
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/a6e2d920
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/a6e2d920

Branch: refs/heads/trunk
Commit: a6e2d9203092ef36d141c781938b086518db8385
Parents: b6cb758
Author: Jayush Luniya <jl...@hortonworks.com>
Authored: Fri Jun 17 07:48:09 2016 -0700
Committer: Jayush Luniya <jl...@hortonworks.com>
Committed: Fri Jun 17 07:48:09 2016 -0700

----------------------------------------------------------------------
 .../libraries/functions/conf_select.py          |   6 +
 .../libraries/functions/copy_tarball.py         |   8 +-
 .../SPARK2/2.0.0/package/scripts/params.py      |   2 -
 .../2.0.0/package/scripts/spark_service.py      |   7 +-
 .../stacks/HDP/2.5/role_command_order.json      |   4 +-
 .../hooks/after-INSTALL/test_after_install.py   | 716 ++-----------------
 6 files changed, 63 insertions(+), 680 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/a6e2d920/ambari-common/src/main/python/resource_management/libraries/functions/conf_select.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/conf_select.py b/ambari-common/src/main/python/resource_management/libraries/functions/conf_select.py
index 4eb0015..f98f1fc 100644
--- a/ambari-common/src/main/python/resource_management/libraries/functions/conf_select.py
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/conf_select.py
@@ -145,6 +145,12 @@ _PACKAGE_DIRS = {
       "current_dir": "{0}/current/spark-client/conf".format(STACK_ROOT_PATTERN)
     }
   ],
+  "spark2": [
+    {
+      "conf_dir": "/etc/spark2/conf",
+      "current_dir": "{0}/current/spark2-client/conf".format(STACK_ROOT_PATTERN)
+    }
+  ],
   "sqoop": [
     {
       "conf_dir": "/etc/sqoop/conf",

http://git-wip-us.apache.org/repos/asf/ambari/blob/a6e2d920/ambari-common/src/main/python/resource_management/libraries/functions/copy_tarball.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/copy_tarball.py b/ambari-common/src/main/python/resource_management/libraries/functions/copy_tarball.py
index 286df8d..ac4e5b8 100644
--- a/ambari-common/src/main/python/resource_management/libraries/functions/copy_tarball.py
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/copy_tarball.py
@@ -59,11 +59,13 @@ TARBALL_MAP = {
   "mapreduce": ("{0}/{1}/hadoop/mapreduce.tar.gz".format(STACK_ROOT_PATTERN, STACK_VERSION_PATTERN),
                 "/{0}/apps/{1}/mapreduce/mapreduce.tar.gz".format(STACK_NAME_PATTERN, STACK_VERSION_PATTERN)),
   "spark": ("{0}/{1}/spark/lib/spark-{2}-assembly.jar".format(STACK_ROOT_PATTERN, STACK_VERSION_PATTERN, STACK_NAME_PATTERN),
-            "/{0}/apps/{1}/spark/spark-{0}-assembly.jar".format(STACK_NAME_PATTERN, STACK_VERSION_PATTERN))
+            "/{0}/apps/{1}/spark/spark-{0}-assembly.jar".format(STACK_NAME_PATTERN, STACK_VERSION_PATTERN)),
+  "spark2": ("/tmp/spark2/spark2-{0}-yarn-archive.tar.gz".format(STACK_NAME_PATTERN),
+             "/{0}/apps/{1}/spark2/spark2-{0}-yarn-archive.tar.gz".format(STACK_NAME_PATTERN, STACK_VERSION_PATTERN))
 }
 
 
-def _get_tarball_paths(name, use_upgrading_version_during_upgrade=True, custom_source_file=None, custom_dest_file=None):
+def get_tarball_paths(name, use_upgrading_version_during_upgrade=True, custom_source_file=None, custom_dest_file=None):
   """
   For a given tarball name, get the source and destination paths to use.
   :param name: Tarball name
@@ -203,7 +205,7 @@ def copy_to_hdfs(name, user_group, owner, file_mode=0444, custom_source_file=Non
   import params
 
   Logger.info("Called copy_to_hdfs tarball: {0}".format(name))
-  (success, source_file, dest_file) = _get_tarball_paths(name, use_upgrading_version_during_upgrade,
+  (success, source_file, dest_file) = get_tarball_paths(name, use_upgrading_version_during_upgrade,
                                                          custom_source_file, custom_dest_file)
 
   if not success:
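
Two changes here: TARBALL_MAP gains a "spark2" entry whose source is a
locally staged archive under /tmp/spark2/ and whose destination lives
under /{stack-name}/apps/{stack-version}/spark2/ on HDFS, and
_get_tarball_paths() is renamed to the public get_tarball_paths() so that
callers outside this module (spark_service.py, below) can resolve the
same paths instead of hardcoding them. Based on the tuple unpacking shown
above, a caller looks roughly like this:

    from resource_management.libraries.functions.copy_tarball import get_tarball_paths

    # returns (success, source_file, dest_file); index 1 is the local
    # staging path, which spark_service.py below uses in place of the
    # previously hardcoded /tmp/spark2/spark2-hdp-yarn-archive.tar.gz
    (success, source_file, dest_file) = get_tarball_paths("spark2")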

http://git-wip-us.apache.org/repos/asf/ambari/blob/a6e2d920/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/params.py
index be99edd..a0d91c7 100755
--- a/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/params.py
@@ -20,11 +20,9 @@ limitations under the License.
 
 
 import status_params
-from setup_spark import *
 from resource_management.libraries.functions.stack_features import check_stack_feature
 from resource_management.libraries.functions.constants import StackFeature
 from resource_management.libraries.functions import conf_select, stack_select
-from resource_management.libraries.functions.get_stack_version import get_stack_version
 from resource_management.libraries.functions.version import format_stack_version
 from resource_management.libraries.functions.format import format
 from resource_management.libraries.functions.default import default

http://git-wip-us.apache.org/repos/asf/ambari/blob/a6e2d920/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/spark_service.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/spark_service.py b/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/spark_service.py
index c2385df..0be7270 100755
--- a/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/spark_service.py
+++ b/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/spark_service.py
@@ -24,7 +24,7 @@ from contextlib import closing
 
 from resource_management.libraries.script.script import Script
 from resource_management.libraries.resources.hdfs_resource import HdfsResource
-from resource_management.libraries.functions.copy_tarball import copy_to_hdfs
+from resource_management.libraries.functions.copy_tarball import copy_to_hdfs, get_tarball_paths
 from resource_management.libraries.functions import format
 from resource_management.core.resources.system import File, Execute
 from resource_management.libraries.functions.version import format_stack_version
@@ -42,7 +42,8 @@ def make_tarfile(output_filename, source_dir):
   if not os.path.exists(parent_dir):
     os.makedirs(parent_dir)
   with closing(tarfile.open(output_filename, "w:gz")) as tar:
-    tar.add(source_dir, arcname=os.path.basename(source_dir))
+    for file in os.listdir(source_dir):
+      tar.add(os.path.join(source_dir,file),arcname=file)
 
 
 def spark_service(name, upgrade_type=None, action=None):
@@ -57,7 +58,7 @@ def spark_service(name, upgrade_type=None, action=None):
     if effective_version and check_stack_feature(StackFeature.SPARK_16PLUS, effective_version):
       # create & copy spark2-hdp-yarn-archive.tar.gz to hdfs
       source_dir=params.spark_home+"/jars"
-      tmp_archive_file="/tmp/spark2/spark2-hdp-yarn-archive.tar.gz"
+      tmp_archive_file=get_tarball_paths("spark2")[1]
       make_tarfile(tmp_archive_file, source_dir)
       copy_to_hdfs("spark2", params.user_group, params.hdfs_user, host_sys_prepped=params.host_sys_prepped)
       # create spark history directory
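
Two fixes in spark_service.py: make_tarfile() now adds each entry of
source_dir individually with arcname=file, so the jars land at the root
of the archive rather than under a single jars/ top-level directory, and
tmp_archive_file is resolved through get_tarball_paths("spark2") so the
staging path stays in sync with TARBALL_MAP instead of being duplicated
as a string literal. The layout difference, sketched with hypothetical
jar names:

    # before: tar.add(source_dir, arcname=os.path.basename(source_dir))
    #   archive entries: jars/a.jar, jars/b.jar, ...
    # after:  tar.add(os.path.join(source_dir, file), arcname=file)
    #   archive entries: a.jar, b.jar, ...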

http://git-wip-us.apache.org/repos/asf/ambari/blob/a6e2d920/ambari-server/src/main/resources/stacks/HDP/2.5/role_command_order.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/role_command_order.json b/ambari-server/src/main/resources/stacks/HDP/2.5/role_command_order.json
index f6011b0..47ba050 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.5/role_command_order.json
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/role_command_order.json
@@ -15,7 +15,7 @@
   },
   "_comment" : "Dependencies that are used when GLUSTERFS is not present in cluster",
   "optional_no_glusterfs": {
-    "SPARK2_JOBHISTORYSERVER-START" : ["NAMENODE-START"],
-    "SPARK2_THRIFTSERVER-START" : ["HIVE_SERVER-START"]
+    "SPARK2_JOBHISTORYSERVER-START" : ["NAMENODE-START", "DATANODE-START"],
+    "SPARK2_THRIFTSERVER-START" : ["NAMENODE-START", "DATANODE-START", "HIVE_SERVER-START"]
   }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/a6e2d920/ambari-server/src/test/python/stacks/2.0.6/hooks/after-INSTALL/test_after_install.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/hooks/after-INSTALL/test_after_install.py b/ambari-server/src/test/python/stacks/2.0.6/hooks/after-INSTALL/test_after_install.py
index 6c7fe18..06a366e 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/hooks/after-INSTALL/test_after_install.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/hooks/after-INSTALL/test_after_install.py
@@ -21,6 +21,7 @@ limitations under the License.
 import json
 from mock.mock import MagicMock, call, patch
 from stacks.utils.RMFTestCase import *
+from resource_management.libraries.functions import conf_select
 
 @patch("os.path.exists", new = MagicMock(return_value=True))
 class TestHookAfterInstall(RMFTestCase):
@@ -80,215 +81,21 @@ class TestHookAfterInstall(RMFTestCase):
       configuration_attributes = self.getConfig()['configuration_attributes']['core-site'],
       only_if="ls /usr/hdp/current/hadoop-client/conf")
 
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/ranger/kms/conf', '/etc/ranger/kms/conf.backup'),
-        not_if = 'test -e /etc/ranger/kms/conf.backup',
-        sudo = True,)
-    self.assertResourceCalled('Directory', '/etc/ranger/kms/conf',
-        action = ['delete'],)
-    self.assertResourceCalled('Link', '/etc/ranger/kms/conf',
-        to = '/usr/hdp/current/ranger-kms/conf',)
-
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/hive2/conf', '/etc/hive2/conf.backup'),
-                              not_if = 'test -e /etc/hive2/conf.backup',
-                              sudo = True)
-    self.assertResourceCalled('Directory', '/etc/hive2/conf',
-                            action = ['delete'])
-    self.assertResourceCalled('Link', '/etc/hive2/conf',
-                            to = '/usr/hdp/current/hive-server2-hive2/conf')
-
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/zookeeper/conf', '/etc/zookeeper/conf.backup'),
-                              not_if = 'test -e /etc/zookeeper/conf.backup',
-                              sudo = True)
-    self.assertResourceCalled('Directory', '/etc/zookeeper/conf',
-                              action = ['delete'])
-    self.assertResourceCalled('Link', '/etc/zookeeper/conf',
-                              to = '/usr/hdp/current/zookeeper-client/conf')
-
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/pig/conf', '/etc/pig/conf.backup'),
-                              not_if = 'test -e /etc/pig/conf.backup',
-                              sudo = True,)
-    self.assertResourceCalled('Directory', '/etc/pig/conf',
-                            action = ['delete'],)
-    self.assertResourceCalled('Link', '/etc/pig/conf',
-                            to = '/usr/hdp/current/pig-client/conf',)
-
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/tez/conf', '/etc/tez/conf.backup'),
-                              not_if = 'test -e /etc/tez/conf.backup',
-                              sudo = True)
-    self.assertResourceCalled('Directory', '/etc/tez/conf',
-                              action = ['delete'])
-    self.assertResourceCalled('Link', '/etc/tez/conf',
-                              to = '/usr/hdp/current/tez-client/conf')
-
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/hive-webhcat/conf', '/etc/hive-webhcat/conf.backup'),
-        not_if = 'test -e /etc/hive-webhcat/conf.backup',
-        sudo = True,)
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/hive-hcatalog/conf', '/etc/hive-hcatalog/conf.backup'),
-        not_if = 'test -e /etc/hive-hcatalog/conf.backup',
-        sudo = True,)
-
-    self.assertResourceCalled('Directory', '/etc/hive-webhcat/conf',
-        action = ['delete'],)
-    self.assertResourceCalled('Link', '/etc/hive-webhcat/conf',
-        to = '/usr/hdp/current/hive-webhcat/etc/webhcat',)
-
-    self.assertResourceCalled('Directory', '/etc/hive-hcatalog/conf',
-        action = ['delete'],)
-    self.assertResourceCalled('Link', '/etc/hive-hcatalog/conf',
-        to = '/usr/hdp/current/hive-webhcat/etc/hcatalog',)
-
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/hbase/conf', '/etc/hbase/conf.backup'),
-        not_if = 'test -e /etc/hbase/conf.backup',
-        sudo = True)
-    self.assertResourceCalled('Directory', '/etc/hbase/conf',
-        action = ['delete'])
-    self.assertResourceCalled('Link', '/etc/hbase/conf',
-        to = '/usr/hdp/current/hbase-client/conf')
-
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/ranger/tagsync/conf', '/etc/ranger/tagsync/conf.backup'),
-                              not_if = 'test -e /etc/ranger/tagsync/conf.backup',
-                              sudo = True,)
-    self.assertResourceCalled('Directory', '/etc/ranger/tagsync/conf',
-                              action = ['delete'],)
-    self.assertResourceCalled('Link', '/etc/ranger/tagsync/conf',
-                              to = '/usr/hdp/current/ranger-tagsync/conf',)
-
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/ranger/usersync/conf', '/etc/ranger/usersync/conf.backup'),
-                              not_if = 'test -e /etc/ranger/usersync/conf.backup',
-                              sudo = True)
-    self.assertResourceCalled('Directory', '/etc/ranger/usersync/conf',
-                              action = ['delete'])
-    self.assertResourceCalled('Link', '/etc/ranger/usersync/conf',
-                              to = '/usr/hdp/current/ranger-usersync/conf')
-
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/hadoop/conf', '/etc/hadoop/conf.backup'),
-                              not_if = 'test -e /etc/hadoop/conf.backup',
-                              sudo = True)
-    self.assertResourceCalled('Directory', '/etc/hadoop/conf',
-                              action = ['delete'])
-    self.assertResourceCalled('Link', '/etc/hadoop/conf',
-                              to = '/usr/hdp/current/hadoop-client/conf')
-
-
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/mahout/conf', '/etc/mahout/conf.backup'),
-        not_if = 'test -e /etc/mahout/conf.backup',
-        sudo = True)
-    self.assertResourceCalled('Directory', '/etc/mahout/conf',
-        action = ['delete'])
-    self.assertResourceCalled('Link', '/etc/mahout/conf',
-        to = '/usr/hdp/current/mahout-client/conf')
-
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/storm/conf', '/etc/storm/conf.backup'),
-        not_if = 'test -e /etc/storm/conf.backup',
-        sudo = True)
-    self.assertResourceCalled('Directory', '/etc/storm/conf',
-        action = ['delete'])
-    self.assertResourceCalled('Link', '/etc/storm/conf',
-        to = '/usr/hdp/current/storm-client/conf')
-
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/ranger/admin/conf', '/etc/ranger/admin/conf.backup'),
-        not_if = 'test -e /etc/ranger/admin/conf.backup',
-        sudo = True)
-    self.assertResourceCalled('Directory', '/etc/ranger/admin/conf',
-        action = ['delete'])
-    self.assertResourceCalled('Link', '/etc/ranger/admin/conf',
-        to = '/usr/hdp/current/ranger-admin/conf')
-
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/flume/conf', '/etc/flume/conf.backup'),
-        not_if = 'test -e /etc/flume/conf.backup',
-        sudo = True,)
-    self.assertResourceCalled('Directory', '/etc/flume/conf',
-        action = ['delete'],)
-    self.assertResourceCalled('Link', '/etc/flume/conf',
-        to = '/usr/hdp/current/flume-server/conf',)
-
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/sqoop/conf', '/etc/sqoop/conf.backup'),
-                              not_if = 'test -e /etc/sqoop/conf.backup',
-                              sudo = True)
-    self.assertResourceCalled('Directory', '/etc/sqoop/conf',
-                              action = ['delete'])
-    self.assertResourceCalled('Link', '/etc/sqoop/conf',
-                              to = '/usr/hdp/current/sqoop-client/conf')
-
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/accumulo/conf', '/etc/accumulo/conf.backup'),
-        not_if = 'test -e /etc/accumulo/conf.backup',
-        sudo = True,)
-    self.assertResourceCalled('Directory', '/etc/accumulo/conf',
-        action = ['delete'],)
-    self.assertResourceCalled('Link', '/etc/accumulo/conf',
-        to = '/usr/hdp/current/accumulo-client/conf',)
-
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/phoenix/conf', '/etc/phoenix/conf.backup'),
-        not_if = 'test -e /etc/phoenix/conf.backup',
-        sudo = True)
-    self.assertResourceCalled('Directory', '/etc/phoenix/conf',
-        action = ['delete'])
-    self.assertResourceCalled('Link', '/etc/phoenix/conf',
-        to = '/usr/hdp/current/phoenix-client/conf')
-
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/storm-slider-client/conf', '/etc/storm-slider-client/conf.backup'),
-        not_if = 'test -e /etc/storm-slider-client/conf.backup',
-        sudo = True)
-    self.assertResourceCalled('Directory', '/etc/storm-slider-client/conf',
-        action = ['delete'])
-    self.assertResourceCalled('Link', '/etc/storm-slider-client/conf',
-        to = '/usr/hdp/current/storm-slider-client/conf')
-
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/slider/conf', '/etc/slider/conf.backup'),
-        not_if = 'test -e /etc/slider/conf.backup',
-        sudo = True)
-    self.assertResourceCalled('Directory', '/etc/slider/conf',
-        action = ['delete'])
-    self.assertResourceCalled('Link', '/etc/slider/conf',
-        to = '/usr/hdp/current/slider-client/conf')
-
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/oozie/conf', '/etc/oozie/conf.backup'),
-        not_if = 'test -e /etc/oozie/conf.backup',
-        sudo = True)
-    self.assertResourceCalled('Directory', '/etc/oozie/conf',
-        action = ['delete'])
-    self.assertResourceCalled('Link', '/etc/oozie/conf',
-        to = '/usr/hdp/current/oozie-client/conf')
-
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/falcon/conf', '/etc/falcon/conf.backup'),
-        not_if = 'test -e /etc/falcon/conf.backup',
-        sudo = True)
-    self.assertResourceCalled('Directory', '/etc/falcon/conf',
-        action = ['delete'])
-    self.assertResourceCalled('Link', '/etc/falcon/conf',
-        to = '/usr/hdp/current/falcon-client/conf')
-
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/knox/conf', '/etc/knox/conf.backup'),
-                              not_if = 'test -e /etc/knox/conf.backup',
-                              sudo = True)
-    self.assertResourceCalled('Directory', '/etc/knox/conf',
-                              action = ['delete'])
-    self.assertResourceCalled('Link', '/etc/knox/conf',
-                              to = '/usr/hdp/current/knox-server/conf')
-
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/kafka/conf', '/etc/kafka/conf.backup'),
-                              not_if = 'test -e /etc/kafka/conf.backup',
-                              sudo = True)
-    self.assertResourceCalled('Directory', '/etc/kafka/conf',
-                              action = ['delete'])
-    self.assertResourceCalled('Link', '/etc/kafka/conf',
-                              to = '/usr/hdp/current/kafka-broker/conf') 
-
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/hive/conf', '/etc/hive/conf.backup'),
-        not_if = 'test -e /etc/hive/conf.backup',
-        sudo = True)
-    self.assertResourceCalled('Directory', '/etc/hive/conf',
-        action = ['delete'])
-    self.assertResourceCalled('Link', '/etc/hive/conf',
-        to = '/usr/hdp/current/hive-client/conf')
-
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/spark/conf', '/etc/spark/conf.backup'),
-        not_if = 'test -e /etc/spark/conf.backup',
-        sudo = True)
-    self.assertResourceCalled('Directory', '/etc/spark/conf',
-        action = ['delete'])
-    self.assertResourceCalled('Link', '/etc/spark/conf',
-        to = '/usr/hdp/current/spark-client/conf')
+    package_dirs = conf_select.get_package_dirs();
+    for package, dir_defs in package_dirs.iteritems():
+      for dir_def in dir_defs:
+        conf_dir = dir_def['conf_dir']
+        conf_backup_dir = conf_dir + ".backup"
+        self.assertResourceCalled('Execute', ('cp', '-R', '-p', conf_dir, conf_backup_dir),
+            not_if = 'test -e ' + conf_backup_dir,
+            sudo = True,)
+      for dir_def in dir_defs:
+        conf_dir = dir_def['conf_dir']
+        current_dir = dir_def['current_dir']
+        self.assertResourceCalled('Directory', conf_dir,
+            action = ['delete'],)
+        self.assertResourceCalled('Link', conf_dir,
+            to = current_dir,)
 
     self.assertNoMoreResources()
 
@@ -335,258 +142,21 @@ class TestHookAfterInstall(RMFTestCase):
       configuration_attributes = self.getConfig()['configuration_attributes']['core-site'],
       only_if="ls /usr/hdp/current/hadoop-client/conf")
 
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/ranger/kms/conf', '/etc/ranger/kms/conf.backup'),
-        not_if = 'test -e /etc/ranger/kms/conf.backup',
-        sudo = True,)
-
-    self.assertResourceCalled('Directory', '/etc/ranger/kms/conf',
-        action = ['delete'],)
-
-    self.assertResourceCalled('Link', '/etc/ranger/kms/conf',
-        to = '/usr/hdp/current/ranger-kms/conf',)
-
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/hive2/conf', '/etc/hive2/conf.backup'),
-                              not_if = 'test -e /etc/hive2/conf.backup',
-                              sudo = True)
-    self.assertResourceCalled('Directory', '/etc/hive2/conf',
-                              action = ['delete'])
-    self.assertResourceCalled('Link', '/etc/hive2/conf',
-                              to = '/usr/hdp/current/hive-server2-hive2/conf')
-
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/zookeeper/conf', '/etc/zookeeper/conf.backup'),
-                              not_if = 'test -e /etc/zookeeper/conf.backup',
-                              sudo = True)
-    self.assertResourceCalled('Directory', '/etc/zookeeper/conf',
-                              action = ['delete'])
-    self.assertResourceCalled('Link', '/etc/zookeeper/conf',
-                              to = '/usr/hdp/current/zookeeper-client/conf')    
-
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/pig/conf', '/etc/pig/conf.backup'),
-        not_if = 'test -e /etc/pig/conf.backup',
-        sudo = True,)
-
-    self.assertResourceCalled('Directory', '/etc/pig/conf',
-                              action=['delete'])
-
-    self.assertResourceCalled("Link", "/etc/pig/conf",
-                              to="/usr/hdp/current/pig-client/conf")
-
-    # pig fails, so no Directory/Link combo
-
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/tez/conf', '/etc/tez/conf.backup'),
-        not_if = 'test -e /etc/tez/conf.backup',
-        sudo = True)
-
-    self.assertResourceCalled('Directory', '/etc/tez/conf',
-        action = ['delete'])
-
-    self.assertResourceCalled('Link', '/etc/tez/conf',
-        to = '/usr/hdp/current/tez-client/conf')
-
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/hive-webhcat/conf', '/etc/hive-webhcat/conf.backup'),
-        not_if = 'test -e /etc/hive-webhcat/conf.backup',
-        sudo = True,)
-
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/hive-hcatalog/conf', '/etc/hive-hcatalog/conf.backup'),
-        not_if = 'test -e /etc/hive-hcatalog/conf.backup',
-        sudo = True,)
-
-    self.assertResourceCalled('Directory', '/etc/hive-webhcat/conf',
-        action = ['delete'],)
-
-    self.assertResourceCalled('Link', '/etc/hive-webhcat/conf',
-        to = '/usr/hdp/current/hive-webhcat/etc/webhcat',)
-
-    self.assertResourceCalled('Directory', '/etc/hive-hcatalog/conf',
-        action = ['delete'],)
-
-    self.assertResourceCalled('Link', '/etc/hive-hcatalog/conf',
-        to = '/usr/hdp/current/hive-webhcat/etc/hcatalog',)
-
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/hbase/conf', '/etc/hbase/conf.backup'),
-        not_if = 'test -e /etc/hbase/conf.backup',
-        sudo = True)
-
-    self.assertResourceCalled('Directory', '/etc/hbase/conf',
-        action = ['delete'])
-
-    self.assertResourceCalled('Link', '/etc/hbase/conf',
-        to = '/usr/hdp/current/hbase-client/conf')
-
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/ranger/tagsync/conf', '/etc/ranger/tagsync/conf.backup'),
-                              not_if = 'test -e /etc/ranger/tagsync/conf.backup',
-                              sudo = True,)
-
-    self.assertResourceCalled('Directory', '/etc/ranger/tagsync/conf',
-                              action = ['delete'],)
-
-    self.assertResourceCalled('Link', '/etc/ranger/tagsync/conf',
-                              to = '/usr/hdp/current/ranger-tagsync/conf',)
-
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/ranger/usersync/conf', '/etc/ranger/usersync/conf.backup'),
-                              not_if = 'test -e /etc/ranger/usersync/conf.backup',
-                              sudo = True)
-
-    self.assertResourceCalled('Directory', '/etc/ranger/usersync/conf',
-                              action = ['delete'])
-
-    self.assertResourceCalled('Link', '/etc/ranger/usersync/conf',
-                              to = '/usr/hdp/current/ranger-usersync/conf')
-
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/hadoop/conf', '/etc/hadoop/conf.backup'),
-        not_if = 'test -e /etc/hadoop/conf.backup',
-        sudo = True)
-
-    self.assertResourceCalled('Directory', '/etc/hadoop/conf',
-        action = ['delete'])
-
-    self.assertResourceCalled('Link', '/etc/hadoop/conf',
-        to = '/usr/hdp/current/hadoop-client/conf')
-
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/mahout/conf', '/etc/mahout/conf.backup'),
-        not_if = 'test -e /etc/mahout/conf.backup',
-        sudo = True)
-
-    self.assertResourceCalled('Directory', '/etc/mahout/conf',
-        action = ['delete'])
-
-    self.assertResourceCalled('Link', '/etc/mahout/conf',
-        to = '/usr/hdp/current/mahout-client/conf')
-
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/storm/conf', '/etc/storm/conf.backup'),
-        not_if = 'test -e /etc/storm/conf.backup',
-        sudo = True)
-
-    self.assertResourceCalled('Directory', '/etc/storm/conf',
-        action = ['delete'])
-
-    self.assertResourceCalled('Link', '/etc/storm/conf',
-        to = '/usr/hdp/current/storm-client/conf')
-
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/ranger/admin/conf', '/etc/ranger/admin/conf.backup'),
-        not_if = 'test -e /etc/ranger/admin/conf.backup',
-        sudo = True)
-
-    self.assertResourceCalled('Directory', '/etc/ranger/admin/conf',
-        action = ['delete'])
-
-    self.assertResourceCalled('Link', '/etc/ranger/admin/conf',
-        to = '/usr/hdp/current/ranger-admin/conf')
-
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/flume/conf', '/etc/flume/conf.backup'),
-        not_if = 'test -e /etc/flume/conf.backup',
-        sudo = True,)
-
-    self.assertResourceCalled('Directory', '/etc/flume/conf',
-        action = ['delete'],)
-
-    self.assertResourceCalled('Link', '/etc/flume/conf',
-        to = '/usr/hdp/current/flume-server/conf',)
-
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/sqoop/conf', '/etc/sqoop/conf.backup'),
-                              not_if = 'test -e /etc/sqoop/conf.backup',
-                              sudo = True)
-    self.assertResourceCalled('Directory', '/etc/sqoop/conf',
-                            action = ['delete'])
-    self.assertResourceCalled('Link', '/etc/sqoop/conf',
-                            to = '/usr/hdp/current/sqoop-client/conf')
-
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/accumulo/conf', '/etc/accumulo/conf.backup'),
-        not_if = 'test -e /etc/accumulo/conf.backup',
-        sudo = True,)
-
-    self.assertResourceCalled('Directory', '/etc/accumulo/conf',
-        action = ['delete'],)
-
-    self.assertResourceCalled('Link', '/etc/accumulo/conf',
-        to = '/usr/hdp/current/accumulo-client/conf',)
-
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/phoenix/conf', '/etc/phoenix/conf.backup'),
-        not_if = 'test -e /etc/phoenix/conf.backup',
-        sudo = True)
-
-    self.assertResourceCalled('Directory', '/etc/phoenix/conf',
-        action = ['delete'])
-
-    self.assertResourceCalled('Link', '/etc/phoenix/conf',
-        to = '/usr/hdp/current/phoenix-client/conf')
-
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/storm-slider-client/conf', '/etc/storm-slider-client/conf.backup'),
-        not_if = 'test -e /etc/storm-slider-client/conf.backup',
-        sudo = True)
-
-    self.assertResourceCalled('Directory', '/etc/storm-slider-client/conf',
-        action = ['delete'])
-
-    self.assertResourceCalled('Link', '/etc/storm-slider-client/conf',
-        to = '/usr/hdp/current/storm-slider-client/conf')
-
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/slider/conf', '/etc/slider/conf.backup'),
-        not_if = 'test -e /etc/slider/conf.backup',
-        sudo = True)
-
-    self.assertResourceCalled('Directory', '/etc/slider/conf',
-        action = ['delete'])
-
-    self.assertResourceCalled('Link', '/etc/slider/conf',
-        to = '/usr/hdp/current/slider-client/conf')
-
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/oozie/conf', '/etc/oozie/conf.backup'),
-        not_if = 'test -e /etc/oozie/conf.backup',
-        sudo = True)
-
-    self.assertResourceCalled('Directory', '/etc/oozie/conf',
-        action = ['delete'])
-
-    self.assertResourceCalled('Link', '/etc/oozie/conf',
-        to = '/usr/hdp/current/oozie-client/conf')
-
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/falcon/conf', '/etc/falcon/conf.backup'),
-        not_if = 'test -e /etc/falcon/conf.backup',
-        sudo = True)
-
-    self.assertResourceCalled('Directory', '/etc/falcon/conf',
-        action = ['delete'])
-    self.assertResourceCalled('Link', '/etc/falcon/conf',
-        to = '/usr/hdp/current/falcon-client/conf')
-
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/knox/conf', '/etc/knox/conf.backup'),
-                              not_if = 'test -e /etc/knox/conf.backup',
-                              sudo = True)
-    self.assertResourceCalled('Directory', '/etc/knox/conf',
-                              action = ['delete'])
-    self.assertResourceCalled('Link', '/etc/knox/conf',
-                              to = '/usr/hdp/current/knox-server/conf')
-
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/kafka/conf', '/etc/kafka/conf.backup'),
-                              not_if = 'test -e /etc/kafka/conf.backup',
-                              sudo = True)
-
-    self.assertResourceCalled('Directory', '/etc/kafka/conf',
-                              action = ['delete'])
-
-    self.assertResourceCalled('Link', '/etc/kafka/conf',
-                              to = '/usr/hdp/current/kafka-broker/conf')
-
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/hive/conf', '/etc/hive/conf.backup'),
-        not_if = 'test -e /etc/hive/conf.backup',
-        sudo = True)
-
-    self.assertResourceCalled('Directory', '/etc/hive/conf',
-        action = ['delete'])
-
-    self.assertResourceCalled('Link', '/etc/hive/conf',
-        to = '/usr/hdp/current/hive-client/conf')
-
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/spark/conf', '/etc/spark/conf.backup'),
-        not_if = 'test -e /etc/spark/conf.backup',
-        sudo = True)
-
-    self.assertResourceCalled('Directory', '/etc/spark/conf',
-        action = ['delete'])
-
-    self.assertResourceCalled('Link', '/etc/spark/conf',
-        to = '/usr/hdp/current/spark-client/conf')
+    package_dirs = conf_select.get_package_dirs();
+    for package, dir_defs in package_dirs.iteritems():
+      for dir_def in dir_defs:
+        conf_dir = dir_def['conf_dir']
+        conf_backup_dir = conf_dir + ".backup"
+        self.assertResourceCalled('Execute', ('cp', '-R', '-p', conf_dir, conf_backup_dir),
+            not_if = 'test -e ' + conf_backup_dir,
+            sudo = True,)
+      for dir_def in dir_defs:
+        conf_dir = dir_def['conf_dir']
+        current_dir = dir_def['current_dir']
+        self.assertResourceCalled('Directory', conf_dir,
+            action = ['delete'],)
+        self.assertResourceCalled('Link', conf_dir,
+            to = current_dir,)
 
     self.assertNoMoreResources()
 
@@ -666,214 +236,20 @@ class TestHookAfterInstall(RMFTestCase):
       configuration_attributes = self.getConfig()['configuration_attributes']['core-site'],
       only_if="ls /usr/hdp/current/hadoop-client/conf")
 
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/ranger/kms/conf', '/etc/ranger/kms/conf.backup'),
-        not_if = 'test -e /etc/ranger/kms/conf.backup',
-        sudo = True,)
-    self.assertResourceCalled('Directory', '/etc/ranger/kms/conf',
-        action = ['delete'],)
-    self.assertResourceCalled('Link', '/etc/ranger/kms/conf',
-        to = '/usr/hdp/current/ranger-kms/conf',)
-
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/hive2/conf', '/etc/hive2/conf.backup'),
-                              not_if = 'test -e /etc/hive2/conf.backup',
-                              sudo = True)
-    self.assertResourceCalled('Directory', '/etc/hive2/conf',
-                            action = ['delete'])
-    self.assertResourceCalled('Link', '/etc/hive2/conf',
-                            to = '/usr/hdp/current/hive-server2-hive2/conf')
-
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/zookeeper/conf', '/etc/zookeeper/conf.backup'),
-                              not_if = 'test -e /etc/zookeeper/conf.backup',
-                              sudo = True)
-    self.assertResourceCalled('Directory', '/etc/zookeeper/conf',
-                              action = ['delete'])
-    self.assertResourceCalled('Link', '/etc/zookeeper/conf',
-                              to = '/usr/hdp/current/zookeeper-client/conf')
-
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/pig/conf', '/etc/pig/conf.backup'),
-                              not_if = 'test -e /etc/pig/conf.backup',
-                              sudo = True,)
-    self.assertResourceCalled('Directory', '/etc/pig/conf',
-                            action = ['delete'],)
-    self.assertResourceCalled('Link', '/etc/pig/conf',
-                            to = '/usr/hdp/current/pig-client/conf',)
-
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/tez/conf', '/etc/tez/conf.backup'),
-                              not_if = 'test -e /etc/tez/conf.backup',
-                              sudo = True)
-    self.assertResourceCalled('Directory', '/etc/tez/conf',
-                              action = ['delete'])
-    self.assertResourceCalled('Link', '/etc/tez/conf',
-                              to = '/usr/hdp/current/tez-client/conf')
-
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/hive-webhcat/conf', '/etc/hive-webhcat/conf.backup'),
-        not_if = 'test -e /etc/hive-webhcat/conf.backup',
-        sudo = True,)
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/hive-hcatalog/conf', '/etc/hive-hcatalog/conf.backup'),
-        not_if = 'test -e /etc/hive-hcatalog/conf.backup',
-        sudo = True,)
-
-    self.assertResourceCalled('Directory', '/etc/hive-webhcat/conf',
-        action = ['delete'],)
-    self.assertResourceCalled('Link', '/etc/hive-webhcat/conf',
-        to = '/usr/hdp/current/hive-webhcat/etc/webhcat',)
-
-    self.assertResourceCalled('Directory', '/etc/hive-hcatalog/conf',
-        action = ['delete'],)
-    self.assertResourceCalled('Link', '/etc/hive-hcatalog/conf',
-        to = '/usr/hdp/current/hive-webhcat/etc/hcatalog',)
-
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/hbase/conf', '/etc/hbase/conf.backup'),
-        not_if = 'test -e /etc/hbase/conf.backup',
-        sudo = True)
-    self.assertResourceCalled('Directory', '/etc/hbase/conf',
-        action = ['delete'])
-    self.assertResourceCalled('Link', '/etc/hbase/conf',
-        to = '/usr/hdp/current/hbase-client/conf')
-
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/ranger/tagsync/conf', '/etc/ranger/tagsync/conf.backup'),
-                              not_if = 'test -e /etc/ranger/tagsync/conf.backup',
-                              sudo = True,)
-    self.assertResourceCalled('Directory', '/etc/ranger/tagsync/conf',
-                              action = ['delete'],)
-    self.assertResourceCalled('Link', '/etc/ranger/tagsync/conf',
-                              to = '/usr/hdp/current/ranger-tagsync/conf',)
-
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/ranger/usersync/conf', '/etc/ranger/usersync/conf.backup'),
-                              not_if = 'test -e /etc/ranger/usersync/conf.backup',
-                              sudo = True)
-    self.assertResourceCalled('Directory', '/etc/ranger/usersync/conf',
-                              action = ['delete'])
-    self.assertResourceCalled('Link', '/etc/ranger/usersync/conf',
-                              to = '/usr/hdp/current/ranger-usersync/conf')
-
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/hadoop/conf', '/etc/hadoop/conf.backup'),
-                              not_if = 'test -e /etc/hadoop/conf.backup',
-                              sudo = True)
-    self.assertResourceCalled('Directory', '/etc/hadoop/conf',
-                              action = ['delete'])
-    self.assertResourceCalled('Link', '/etc/hadoop/conf',
-                              to = '/usr/hdp/current/hadoop-client/conf')
-
-
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/mahout/conf', '/etc/mahout/conf.backup'),
-        not_if = 'test -e /etc/mahout/conf.backup',
-        sudo = True)
-    self.assertResourceCalled('Directory', '/etc/mahout/conf',
-        action = ['delete'])
-    self.assertResourceCalled('Link', '/etc/mahout/conf',
-        to = '/usr/hdp/current/mahout-client/conf')
-
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/storm/conf', '/etc/storm/conf.backup'),
-        not_if = 'test -e /etc/storm/conf.backup',
-        sudo = True)
-    self.assertResourceCalled('Directory', '/etc/storm/conf',
-        action = ['delete'])
-    self.assertResourceCalled('Link', '/etc/storm/conf',
-        to = '/usr/hdp/current/storm-client/conf')
-
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/ranger/admin/conf', '/etc/ranger/admin/conf.backup'),
-        not_if = 'test -e /etc/ranger/admin/conf.backup',
-        sudo = True)
-    self.assertResourceCalled('Directory', '/etc/ranger/admin/conf',
-        action = ['delete'])
-    self.assertResourceCalled('Link', '/etc/ranger/admin/conf',
-        to = '/usr/hdp/current/ranger-admin/conf')
-
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/flume/conf', '/etc/flume/conf.backup'),
-        not_if = 'test -e /etc/flume/conf.backup',
-        sudo = True,)
-    self.assertResourceCalled('Directory', '/etc/flume/conf',
-        action = ['delete'],)
-    self.assertResourceCalled('Link', '/etc/flume/conf',
-        to = '/usr/hdp/current/flume-server/conf',)
-
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/sqoop/conf', '/etc/sqoop/conf.backup'),
-                              not_if = 'test -e /etc/sqoop/conf.backup',
-                              sudo = True)
-    self.assertResourceCalled('Directory', '/etc/sqoop/conf',
-                              action = ['delete'])
-    self.assertResourceCalled('Link', '/etc/sqoop/conf',
-                              to = '/usr/hdp/current/sqoop-client/conf')
-
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/accumulo/conf', '/etc/accumulo/conf.backup'),
-        not_if = 'test -e /etc/accumulo/conf.backup',
-        sudo = True,)
-    self.assertResourceCalled('Directory', '/etc/accumulo/conf',
-        action = ['delete'],)
-    self.assertResourceCalled('Link', '/etc/accumulo/conf',
-        to = '/usr/hdp/current/accumulo-client/conf',)
-
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/phoenix/conf', '/etc/phoenix/conf.backup'),
-        not_if = 'test -e /etc/phoenix/conf.backup',
-        sudo = True)
-    self.assertResourceCalled('Directory', '/etc/phoenix/conf',
-        action = ['delete'])
-    self.assertResourceCalled('Link', '/etc/phoenix/conf',
-        to = '/usr/hdp/current/phoenix-client/conf')
-
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/storm-slider-client/conf', '/etc/storm-slider-client/conf.backup'),
-        not_if = 'test -e /etc/storm-slider-client/conf.backup',
-        sudo = True)
-    self.assertResourceCalled('Directory', '/etc/storm-slider-client/conf',
-        action = ['delete'])
-    self.assertResourceCalled('Link', '/etc/storm-slider-client/conf',
-        to = '/usr/hdp/current/storm-slider-client/conf')
-
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/slider/conf', '/etc/slider/conf.backup'),
-        not_if = 'test -e /etc/slider/conf.backup',
-        sudo = True)
-    self.assertResourceCalled('Directory', '/etc/slider/conf',
-        action = ['delete'])
-    self.assertResourceCalled('Link', '/etc/slider/conf',
-        to = '/usr/hdp/current/slider-client/conf')
-
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/oozie/conf', '/etc/oozie/conf.backup'),
-        not_if = 'test -e /etc/oozie/conf.backup',
-        sudo = True)
-    self.assertResourceCalled('Directory', '/etc/oozie/conf',
-        action = ['delete'])
-    self.assertResourceCalled('Link', '/etc/oozie/conf',
-        to = '/usr/hdp/current/oozie-client/conf')
-
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/falcon/conf', '/etc/falcon/conf.backup'),
-        not_if = 'test -e /etc/falcon/conf.backup',
-        sudo = True)
-    self.assertResourceCalled('Directory', '/etc/falcon/conf',
-        action = ['delete'])
-    self.assertResourceCalled('Link', '/etc/falcon/conf',
-        to = '/usr/hdp/current/falcon-client/conf')
-
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/knox/conf', '/etc/knox/conf.backup'),
-                              not_if = 'test -e /etc/knox/conf.backup',
-                              sudo = True)
-    self.assertResourceCalled('Directory', '/etc/knox/conf',
-                              action = ['delete'])
-    self.assertResourceCalled('Link', '/etc/knox/conf',
-                              to = '/usr/hdp/current/knox-server/conf')
-
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/kafka/conf', '/etc/kafka/conf.backup'),
-                              not_if = 'test -e /etc/kafka/conf.backup',
-                              sudo = True)
-    self.assertResourceCalled('Directory', '/etc/kafka/conf',
-                              action = ['delete'])
-    self.assertResourceCalled('Link', '/etc/kafka/conf',
-                              to = '/usr/hdp/current/kafka-broker/conf') 
-
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/hive/conf', '/etc/hive/conf.backup'),
-        not_if = 'test -e /etc/hive/conf.backup',
-        sudo = True)
-    self.assertResourceCalled('Directory', '/etc/hive/conf',
-        action = ['delete'])
-    self.assertResourceCalled('Link', '/etc/hive/conf',
-        to = '/usr/hdp/current/hive-client/conf')
-
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/spark/conf', '/etc/spark/conf.backup'),
-        not_if = 'test -e /etc/spark/conf.backup',
-        sudo = True)
-    self.assertResourceCalled('Directory', '/etc/spark/conf',
-        action = ['delete'])
-    self.assertResourceCalled('Link', '/etc/spark/conf',
-        to = '/usr/hdp/current/spark-client/conf')
+    package_dirs = conf_select.get_package_dirs();
+    for package, dir_defs in package_dirs.iteritems():
+      for dir_def in dir_defs:
+        conf_dir = dir_def['conf_dir']
+        conf_backup_dir = conf_dir + ".backup"
+        self.assertResourceCalled('Execute', ('cp', '-R', '-p', conf_dir, conf_backup_dir),
+            not_if = 'test -e ' + conf_backup_dir,
+            sudo = True,)
+      for dir_def in dir_defs:
+        conf_dir = dir_def['conf_dir']
+        current_dir = dir_def['current_dir']
+        self.assertResourceCalled('Directory', conf_dir,
+            action = ['delete'],)
+        self.assertResourceCalled('Link', conf_dir,
+            to = current_dir,)
 
     self.assertNoMoreResources()
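
The three rewritten test cases replace several hundred lines of
hand-written per-service assertions with a data-driven loop over
conf_select.get_package_dirs(), so any package registered in
_PACKAGE_DIRS (including the new spark2 entry) is verified without
further edits to this file. For spark2 on an HDP install, one pass of
the loop amounts to the same three resource assertions the deleted
literal blocks spelled out per service (a sketch; the concrete paths
follow from the conf_select change above):

    self.assertResourceCalled('Execute',
        ('cp', '-R', '-p', '/etc/spark2/conf', '/etc/spark2/conf.backup'),
        not_if = 'test -e /etc/spark2/conf.backup',
        sudo = True)
    self.assertResourceCalled('Directory', '/etc/spark2/conf',
        action = ['delete'])
    self.assertResourceCalled('Link', '/etc/spark2/conf',
        to = '/usr/hdp/current/spark2-client/conf')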