Posted to commits@ambari.apache.org by jo...@apache.org on 2015/05/05 20:19:53 UTC

[3/3] ambari git commit: AMBARI-10894 - Agent Changes For Supporting HDP 2.3 Configuration Directories (jonathanhurley)

AMBARI-10894 - Agent Changes For Supporting HDP 2.3 Configuration Directories (jonathanhurley)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/03918cf3
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/03918cf3
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/03918cf3

Branch: refs/heads/trunk
Commit: 03918cf3a653ae8d4871adf9a48ff5f77859027b
Parents: c465e9e
Author: Jonathan Hurley <jh...@hortonworks.com>
Authored: Tue May 5 14:19:35 2015 -0400
Committer: Jonathan Hurley <jh...@hortonworks.com>
Committed: Tue May 5 14:19:35 2015 -0400

----------------------------------------------------------------------
 .../libraries/script/script.py                  |  72 ++++++++++
 .../1.6.1.2.2.0/package/scripts/params.py       |  34 +++--
 .../package/scripts/status_params.py            |  22 +++-
 .../FALCON/0.5.0.2.1/package/scripts/falcon.py  |   2 +-
 .../0.5.0.2.1/package/scripts/params_linux.py   |  23 ++--
 .../0.5.0.2.1/package/scripts/status_params.py  |  27 +++-
 .../FLUME/1.4.0.2.0/package/scripts/params.py   |  22 ++--
 .../1.4.0.2.0/package/scripts/params_linux.py   |   9 +-
 .../HBASE/0.96.0.2.0/package/scripts/hbase.py   |   2 +-
 .../0.96.0.2.0/package/scripts/params_linux.py  |  70 +++++-----
 .../0.96.0.2.0/package/scripts/status_params.py |  25 +++-
 .../2.1.0.2.0/package/scripts/params_linux.py   |  65 ++++++----
 .../2.1.0.2.0/package/scripts/status_params.py  |  15 ++-
 .../HIVE/0.12.0.2.0/package/scripts/hive.py     |   2 +-
 .../0.12.0.2.0/package/scripts/params_linux.py  | 130 +++++++++----------
 .../0.12.0.2.0/package/scripts/status_params.py |  46 ++++++-
 .../KAFKA/0.8.1.2.2/package/scripts/params.py   |  22 ++--
 .../0.8.1.2.2/package/scripts/status_params.py  |   4 +-
 .../KNOX/0.5.0.2.2/package/scripts/params.py    |  16 ++-
 .../0.5.0.2.2/package/scripts/params_linux.py   |  23 ++--
 .../0.5.0.2.2/package/scripts/status_params.py  |  12 +-
 .../MAHOUT/1.0.0.2.3/package/scripts/params.py  |  18 ++-
 .../4.0.0.2.0/package/scripts/params_linux.py   |  48 ++++---
 .../4.0.0.2.0/package/scripts/status_params.py  |  23 +++-
 .../0.12.0.2.0/package/scripts/params_linux.py  |  30 +++--
 .../RANGER/0.4.0/package/scripts/params.py      |  18 ++-
 .../0.5.0.2.3/package/scripts/params.py         |   4 +-
 .../SLIDER/0.60.0.2.2/package/scripts/params.py |  20 +--
 .../0.60.0.2.2/package/scripts/params_linux.py  |   8 +-
 .../SPARK/1.2.0.2.2/package/scripts/params.py   |  48 ++++---
 .../1.2.0.2.2/package/scripts/status_params.py  |   3 +-
 .../SQOOP/1.4.4.2.0/package/scripts/params.py   |   1 -
 .../1.4.4.2.0/package/scripts/params_linux.py   |  34 +++--
 .../0.9.1.2.1/package/scripts/params_linux.py   |  36 ++---
 .../0.9.1.2.1/package/scripts/status_params.py  |  31 ++++-
 .../0.4.0.2.1/package/scripts/params_linux.py   |  37 ++++--
 .../TEZ/0.4.0.2.1/package/scripts/tez.py        |  14 +-
 .../2.1.0.2.0/package/scripts/params_linux.py   |  94 ++++++++------
 .../2.1.0.2.0/package/scripts/status_params.py  |   6 +
 .../3.4.5.2.0/package/scripts/params_linux.py   |  39 +++---
 .../3.4.5.2.0/package/scripts/status_params.py  |  22 +++-
 .../2.0.6/hooks/after-INSTALL/scripts/params.py |  24 ++--
 .../scripts/shared_initialization.py            |   5 +-
 .../2.0.6/hooks/before-ANY/scripts/params.py    |  29 +++--
 .../before-ANY/scripts/shared_initialization.py |  33 +++--
 .../2.0.6/hooks/before-START/scripts/params.py  |  27 ++--
 .../python/stacks/2.0.6/FLUME/test_flume.py     |  26 ++--
 .../stacks/2.0.6/HBASE/test_hbase_master.py     |  30 +++--
 .../2.0.6/HBASE/test_hbase_regionserver.py      |  28 ++--
 .../2.0.6/HBASE/test_hbase_service_check.py     |   4 +-
 .../2.0.6/HBASE/test_phoenix_queryserver.py     |  58 ++++-----
 .../python/stacks/2.0.6/HDFS/test_datanode.py   |  26 ++--
 .../stacks/2.0.6/HIVE/test_hive_server.py       |   2 +-
 .../stacks/2.0.6/OOZIE/test_oozie_client.py     |  18 +--
 .../python/stacks/2.0.6/PIG/test_pig_client.py  |   8 +-
 .../ZOOKEEPER/test_zookeeper_service_check.py   |   2 +-
 .../hooks/after-INSTALL/test_after_install.py   |   5 +-
 .../2.0.6/hooks/before-ANY/test_before_any.py   |  11 +-
 .../python/stacks/2.1/STORM/test_storm_base.py  |  26 ++--
 .../2.1/STORM/test_storm_jaas_configuration.py  |   8 +-
 .../stacks/2.1/configs/secured-storm-start.json |   2 +-
 .../stacks/2.2/KAFKA/test_kafka_broker.py       |   4 +-
 .../python/stacks/2.2/KNOX/test_knox_gateway.py |  28 ++--
 .../stacks/2.2/PIG/test_pig_service_check.py    |  12 +-
 .../stacks/2.2/RANGER/test_ranger_admin.py      |   8 +-
 .../stacks/2.2/SLIDER/test_slider_client.py     |   8 +-
 .../stacks/2.2/SPARK/test_job_history_server.py |  24 ++--
 .../stacks/2.2/SPARK/test_spark_client.py       |  20 +--
 .../stacks/2.3/MAHOUT/test_mahout_client.py     |   4 +-
 .../2.3/MAHOUT/test_mahout_service_check.py     |  10 +-
 70 files changed, 1010 insertions(+), 657 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/03918cf3/ambari-common/src/main/python/resource_management/libraries/script/script.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/script/script.py b/ambari-common/src/main/python/resource_management/libraries/script/script.py
index a4ddc7c..9f8653b 100644
--- a/ambari-common/src/main/python/resource_management/libraries/script/script.py
+++ b/ambari-common/src/main/python/resource_management/libraries/script/script.py
@@ -257,6 +257,78 @@ class Script(object):
     """
     return Script.tmp_dir
 
+  @staticmethod
+  def get_component_from_role(role_directory_map, default_role):
+    """
+    Gets the /usr/hdp/current/<component> component given an Ambari role,
+    such as DATANODE or HBASE_MASTER.
+    :return:  the component name, such as hbase-master
+    """
+    from resource_management.libraries.functions.default import default
+
+    command_role = default("/role", default_role)
+    if command_role in role_directory_map:
+      return role_directory_map[command_role]
+    else:
+      return role_directory_map[default_role]
+
+  @staticmethod
+  def get_stack_name():
+    """
+    Gets the name of the stack from hostLevelParams/stack_name.
+    :return: a stack name or None
+    """
+    from resource_management.libraries.functions.default import default
+    return default("/hostLevelParams/stack_name", None)
+
+  @staticmethod
+  def get_hdp_stack_version():
+    """
+    Gets the normalized version of the HDP stack in the form #.#.#.# if it is
+    present in the command's configurations.
+    :return: a normalized HDP stack version or None
+    """
+    stack_name = Script.get_stack_name()
+    if stack_name is None or stack_name.upper() != "HDP":
+      return None
+
+    config = Script.get_config()
+    if 'hostLevelParams' not in config or 'stack_version' not in config['hostLevelParams']:
+      return None
+
+    stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
+    if stack_version_unformatted is None or stack_version_unformatted == '':
+      return None
+
+    return format_hdp_stack_version(stack_version_unformatted)
+
+  @staticmethod
+  def is_hdp_stack_greater_or_equal(compare_to_version):
+    """
+    Gets whether the hostLevelParams/stack_version, after being normalized,
+    is greater than or equal to the specified stack version.
+    :param compare_to_version: the version to compare to
+    :return: True if the command's stack is greater than or equal to the specified version
+    """
+    hdp_stack_version = Script.get_hdp_stack_version()
+    if hdp_stack_version is None or hdp_stack_version == "":
+      return False
+
+    return compare_versions(hdp_stack_version, compare_to_version) >= 0
+
+  @staticmethod
+  def is_hdp_stack_less_than(compare_to_version):
+    """
+    Gets whether the hostLevelParams/stack_version, after being normalized,
+    is less than the specified stack version.
+    :param compare_to_version: the version to compare to
+    :return: True if the command's stack is less than the specified version
+    """
+    hdp_stack_version = Script.get_hdp_stack_version()
+    if hdp_stack_version is None:
+      return False
+
+    return compare_versions(hdp_stack_version, compare_to_version) < 0
 
   def install(self, env):
     """

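For reference, a minimal sketch (not part of this commit) of how a service's status_params.py consumes these new helpers; the role map and paths are illustrative and mirror the HBASE changes later in this diff:

  from resource_management.libraries.functions import format
  from resource_management.libraries.script.script import Script

  # map the Ambari role from the command JSON to a
  # /usr/hdp/current/<component> directory name
  SERVER_ROLE_DIRECTORY_MAP = {
    'HBASE_MASTER' : 'hbase-master',
    'HBASE_REGIONSERVER' : 'hbase-regionserver',
    'HBASE_CLIENT' : 'hbase-client'
  }

  # fall back to the client component when the command's role is unmapped
  component_directory = Script.get_component_from_role(
    SERVER_ROLE_DIRECTORY_MAP, "HBASE_CLIENT")

  # older stacks keep the /etc symlink; HDP 2.2+ resolves through /usr/hdp/current
  hbase_conf_dir = "/etc/hbase/conf"
  if Script.is_hdp_stack_greater_or_equal("2.2"):
    hbase_conf_dir = format("/usr/hdp/current/{component_directory}/conf")
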
http://git-wip-us.apache.org/repos/asf/ambari/blob/03918cf3/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/params.py
index 340f247..4fc9bed 100644
--- a/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/params.py
@@ -17,10 +17,12 @@ See the License for the specific language governing permissions and
 limitations under the License.
 
 """
-
-from resource_management import *
-from resource_management.libraries.functions.version import format_hdp_stack_version, compare_versions
+from resource_management.libraries.functions import format
+from resource_management.libraries.functions.version import format_hdp_stack_version
 from resource_management.libraries.functions.default import default
+from resource_management.libraries.script.script import Script
+from resource_management.libraries.resources.hdfs_directory import HdfsDirectory
+
 import status_params
 
 # server configurations
@@ -33,23 +35,29 @@ security_enabled = status_params.security_enabled
 # hdp version
 stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
 hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)
-has_secure_user_auth = True
-if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') == 0:
-  has_secure_user_auth = False
 
-# accumulo local directory structure
-log_dir = config['configurations']['accumulo-env']['accumulo_log_dir']
-conf_dir = status_params.conf_dir # "/etc/accumulo/conf"
-server_conf_dir = "/etc/accumulo/conf/server"
-client_script = "/usr/hdp/current/accumulo-client/bin/accumulo"
-daemon_script = format("ACCUMULO_CONF_DIR={server_conf_dir} {client_script}")
+has_secure_user_auth = False
+if Script.is_hdp_stack_greater_or_equal("2.3"):
+  has_secure_user_auth = True
+
+# configuration directories
+conf_dir = status_params.conf_dir
+server_conf_dir = status_params.server_conf_dir
 
 # service locations
 hadoop_prefix = "/usr/hdp/current/hadoop-client"
 hadoop_bin_dir = format("{hadoop_prefix}/bin")
-hadoop_conf_dir = "/etc/hadoop/conf"
 zookeeper_home = "/usr/hdp/current/zookeeper-client"
 
+# the configuration directory for HDFS/YARN/MapReduce is the hadoop config
+# directory, which is symlinked only by the hadoop-client component
+hadoop_conf_dir = "/usr/hdp/current/hadoop-client/conf"
+
+# accumulo local directory structure
+log_dir = config['configurations']['accumulo-env']['accumulo_log_dir']
+client_script = "/usr/hdp/current/accumulo-client/bin/accumulo"
+daemon_script = format("ACCUMULO_CONF_DIR={server_conf_dir} {client_script}")
+
 # user and status
 accumulo_user = status_params.accumulo_user
 user_group = config['configurations']['cluster-env']['user_group']

http://git-wip-us.apache.org/repos/asf/ambari/blob/03918cf3/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/status_params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/status_params.py b/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/status_params.py
index a7a6408..45dbb24 100644
--- a/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/status_params.py
+++ b/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/status_params.py
@@ -17,17 +17,33 @@ See the License for the specific language governing permissions and
 limitations under the License.
 
 """
+from resource_management.libraries.functions import format
+from resource_management.libraries.functions.default import default
+from resource_management.libraries.functions import get_kinit_path
+from resource_management.libraries.script.script import Script
 
-from resource_management import *
+# a map of the Ambari role to the component name
+# for use with /usr/hdp/current/<component>
+SERVER_ROLE_DIRECTORY_MAP = {
+  'ACCUMULO_MASTER' : 'accumulo-master',
+  'ACCUMULO_MONITOR' : 'accumulo-monitor',
+  'ACCUMULO_GC' : 'accumulo-gc',
+  'ACCUMULO_TRACER' : 'accumulo-tracer',
+  'ACCUMULO_TSERVER' : 'accumulo-tablet',
+  'ACCUMULO_CLIENT' : 'accumulo-client'
+}
+
+component_directory = Script.get_component_from_role(SERVER_ROLE_DIRECTORY_MAP, "ACCUMULO_CLIENT")
 
 config = Script.get_config()
 
-conf_dir = "/etc/accumulo/conf"
+conf_dir = format('/usr/hdp/current/{component_directory}/conf')
+server_conf_dir = format('{conf_dir}/server')
 pid_dir = config['configurations']['accumulo-env']['accumulo_pid_dir']
 accumulo_user = config['configurations']['accumulo-env']['accumulo_user']
 
 # Security related/required params
 hostname = config['hostname']
 security_enabled = config['configurations']['cluster-env']['security_enabled']
-kinit_path_local = functions.get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
+kinit_path_local = get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
 tmp_dir = Script.get_tmp_dir()

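Note that the format() helper used above resolves {name} placeholders from the caller's scope, which is why conf_dir and server_conf_dir need no explicit arguments. A rough standard-library stand-in (illustrative only, not the library code):

  import inspect

  def format_like(template):
    # substitute {name} placeholders from the caller's globals and locals,
    # approximating resource_management's format()
    caller = inspect.currentframe().f_back
    namespace = dict(caller.f_globals)
    namespace.update(caller.f_locals)
    return template.format(**namespace)

  component_directory = 'accumulo-client'
  conf_dir = format_like('/usr/hdp/current/{component_directory}/conf')
  server_conf_dir = format_like('{conf_dir}/server')
  # server_conf_dir == '/usr/hdp/current/accumulo-client/conf/server'
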
http://git-wip-us.apache.org/repos/asf/ambari/blob/03918cf3/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon.py b/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon.py
index 62fb1dd..ed9098c 100644
--- a/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon.py
+++ b/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon.py
@@ -42,7 +42,7 @@ def falcon(type, action = None):
               owner=params.falcon_user,
               recursive=True
     )
-    Directory(params.falcon_conf_dir_prefix,
+    Directory(params.etc_prefix_dir,
               mode=0755,
               recursive=True
     )

http://git-wip-us.apache.org/repos/asf/ambari/blob/03918cf3/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/params_linux.py
index cb13741..27ced1d 100644
--- a/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/params_linux.py
@@ -16,11 +16,14 @@ See the License for the specific language governing permissions and
 limitations under the License.
 
 """
+import status_params
 
-from resource_management.libraries.functions.version import format_hdp_stack_version, compare_versions
+from resource_management.libraries.functions import format
+from resource_management.libraries.functions.version import format_hdp_stack_version
 from resource_management.libraries.functions.default import default
-from resource_management import *
-from status_params import *
+from resource_management.libraries.functions import get_kinit_path
+from resource_management.libraries.script.script import Script
+from resource_management.libraries.resources.hdfs_directory import HdfsDirectory
 
 config = Script.get_config()
 
@@ -31,9 +34,10 @@ version = default("/commandParams/version", None)
 
 stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
 hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)
+etc_prefix_dir = "/etc/falcon"
 
 # hadoop params
-if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0:
+if Script.is_hdp_stack_greater_or_equal("2.2"):
   hadoop_bin_dir = "/usr/hdp/current/hadoop-client/bin"
 
   # if this is a server action, then use the server binaries; smoke tests
@@ -53,12 +57,13 @@ else:
   falcon_webapp_dir = '/var/lib/falcon/webapp'
   falcon_home = '/usr/lib/falcon'
 
-hadoop_conf_dir = "/etc/hadoop/conf"
-falcon_conf_dir_prefix = "/etc/falcon"
-falcon_conf_dir = format("{falcon_conf_dir_prefix}/conf")
+hadoop_conf_dir = status_params.hadoop_conf_dir
+falcon_conf_dir = status_params.falcon_conf_dir
 oozie_user = config['configurations']['oozie-env']['oozie_user']
 falcon_user = config['configurations']['falcon-env']['falcon_user']
-smoke_user =  config['configurations']['cluster-env']['smokeuser']
+smoke_user = config['configurations']['cluster-env']['smokeuser']
+
+server_pid_file = status_params.server_pid_file
 
 user_group = config['configurations']['cluster-env']['user_group']
 proxyuser_group =  config['configurations']['hadoop-env']['proxyuser_group']
@@ -93,7 +98,7 @@ hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
 hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
 hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
 smokeuser_principal =  config['configurations']['cluster-env']['smokeuser_principal_name']
-kinit_path_local = functions.get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
+kinit_path_local = get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
 import functools
 #create partial functions with common arguments for every HdfsDirectory call
 #to create hdfs directory we need to call params.HdfsDirectory in code

http://git-wip-us.apache.org/repos/asf/ambari/blob/03918cf3/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/status_params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/status_params.py b/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/status_params.py
index c5478e5..23db4f7 100644
--- a/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/status_params.py
+++ b/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/status_params.py
@@ -16,10 +16,22 @@ See the License for the specific language governing permissions and
 limitations under the License.
 
 """
-
-from resource_management import *
 from ambari_commons import OSCheck
 
+from resource_management.libraries.functions import format
+from resource_management.libraries.functions.default import default
+from resource_management.libraries.functions import get_kinit_path
+from resource_management.libraries.script.script import Script
+
+# a map of the Ambari role to the component name
+# for use with /usr/hdp/current/<component>
+SERVER_ROLE_DIRECTORY_MAP = {
+  'FALCON_SERVER' : 'falcon-server',
+  'FALCON_CLIENT' : 'falcon-client'
+}
+
+component_directory = Script.get_component_from_role(SERVER_ROLE_DIRECTORY_MAP, "FALCON_CLIENT")
+
 config = Script.get_config()
 
 if OSCheck.is_windows_family():
@@ -30,13 +42,16 @@ else:
   falcon_pid_dir = config['configurations']['falcon-env']['falcon_pid_dir']
   server_pid_file = format('{falcon_pid_dir}/falcon.pid')
 
+  hadoop_conf_dir = "/etc/hadoop/conf"
+  falcon_conf_dir = "/etc/falcon/conf"
+  if Script.is_hdp_stack_greater_or_equal("2.2"):
+    hadoop_conf_dir = "/usr/hdp/current/hadoop-client/conf"
+    falcon_conf_dir = format("/usr/hdp/current/{component_directory}/conf")
+
   # Security related/required params
   hostname = config['hostname']
   security_enabled = config['configurations']['cluster-env']['security_enabled']
-  hadoop_conf_dir = "/etc/hadoop/conf"
-  kinit_path_local = functions.get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
+  kinit_path_local = get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
   tmp_dir = Script.get_tmp_dir()
-  falcon_conf_dir_prefix = "/etc/falcon"
-  falcon_conf_dir = format("{falcon_conf_dir_prefix}/conf")
   hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
   falcon_user = config['configurations']['falcon-env']['falcon_user']

http://git-wip-us.apache.org/repos/asf/ambari/blob/03918cf3/ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/package/scripts/params.py
index 2bc2ff1..4b2666b 100644
--- a/ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/package/scripts/params.py
@@ -16,11 +16,11 @@ See the License for the specific language governing permissions and
 limitations under the License.
 
 """
-
-from resource_management.libraries.functions.version import format_hdp_stack_version, compare_versions
-from resource_management.libraries.functions.default import default
-from resource_management import *
 from ambari_commons import OSCheck
+from resource_management.libraries.functions import format
+from resource_management.libraries.functions.version import format_hdp_stack_version
+from resource_management.libraries.functions.default import default
+from resource_management.libraries.script.script import Script
 
 if OSCheck.is_windows_family():
   from params_windows import *
@@ -43,15 +44,16 @@ security_enabled = False
 stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
 hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)
 
-#hadoop params
-if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0:
+# hadoop default parameters
+flume_bin = '/usr/bin/flume-ng'
+flume_hive_home = '/usr/lib/hive'
+flume_hcat_home = '/usr/lib/hive-hcatalog'
+
+# hadoop parameters for 2.2+
+if Script.is_hdp_stack_greater_or_equal("2.2"):
   flume_bin = '/usr/hdp/current/flume-server/bin/flume-ng'
   flume_hive_home = '/usr/hdp/current/hive-metastore'
   flume_hcat_home = '/usr/hdp/current/hive-webhcat'
-else:
-  flume_bin = '/usr/bin/flume-ng'
-  flume_hive_home = '/usr/lib/hive'
-  flume_hcat_home = '/usr/lib/hive-hcatalog'
 
 java_home = config['hostLevelParams']['java_home']
 flume_log_dir = '/var/log/flume'

http://git-wip-us.apache.org/repos/asf/ambari/blob/03918cf3/ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/package/scripts/params_linux.py
index b5bf203..a7efbf6 100644
--- a/ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/package/scripts/params_linux.py
@@ -16,16 +16,15 @@ See the License for the specific language governing permissions and
 limitations under the License.
 
 """
-
-from resource_management.libraries.functions.version import format_hdp_stack_version, compare_versions
-from resource_management.libraries.functions.default import default
-from resource_management import *
-from ambari_commons import OSCheck
+from resource_management.libraries.functions import format
+from resource_management.libraries.script.script import Script
 
 # server configurations
 config = Script.get_config()
 
 flume_conf_dir = '/etc/flume/conf'
+if Script.is_hdp_stack_greater_or_equal("2.2"):
+  flume_conf_dir = '/usr/hdp/current/flume-server/conf'
 
 flume_user = 'flume'
 flume_group = 'flume'

http://git-wip-us.apache.org/repos/asf/ambari/blob/03918cf3/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase.py b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase.py
index 8ce244a..9a35a24 100644
--- a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase.py
+++ b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase.py
@@ -46,7 +46,7 @@ def hbase(name=None):
 def hbase(name=None):
   import params
 
-  Directory( params.hbase_conf_dir_prefix,
+  Directory( params.etc_prefix_dir,
       mode=0755
   )
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/03918cf3/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py
index bd0a7b1..59ebb6c1 100644
--- a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py
@@ -17,14 +17,21 @@ See the License for the specific language governing permissions and
 limitations under the License.
 
 """
+import status_params
+import json
 
-from ambari_commons.constants import AMBARI_SUDO_BINARY
 from functions import calc_xmn_from_xms
-from resource_management.libraries.functions.version import format_hdp_stack_version, compare_versions
+
+from ambari_commons.constants import AMBARI_SUDO_BINARY
+
+from resource_management.libraries.functions import format
+from resource_management.libraries.functions.version import format_hdp_stack_version
 from resource_management.libraries.functions.default import default
-from resource_management import *
-import status_params
-import json
+from resource_management.libraries.functions import get_kinit_path
+from resource_management.libraries.functions import get_unique_id_and_date
+from resource_management.libraries.script.script import Script
+from resource_management.libraries.resources.hdfs_directory import HdfsDirectory
+from resource_management.libraries.functions.substitute_vars import substitute_vars
 
 # server configurations
 config = Script.get_config()
@@ -32,32 +39,43 @@ exec_tmp_dir = Script.get_tmp_dir()
 sudo = AMBARI_SUDO_BINARY
 
 stack_name = default("/hostLevelParams/stack_name", None)
-
 version = default("/commandParams/version", None)
+component_directory = status_params.component_directory
+etc_prefix_dir = "/etc/hbase"
 
 stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
 hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)
 
-#hadoop params
-if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0:
+# hadoop default parameters
+hadoop_bin_dir = "/usr/bin"
+hadoop_conf_dir = "/etc/hadoop/conf"
+daemon_script = "/usr/lib/hbase/bin/hbase-daemon.sh"
+region_mover = "/usr/lib/hbase/bin/region_mover.rb"
+region_drainer = "/usr/lib/hbase/bin/draining_servers.rb"
+hbase_cmd = "/usr/lib/hbase/bin/hbase"
+
+# hadoop parameters for 2.2+
+if Script.is_hdp_stack_greater_or_equal("2.2"):
+  hadoop_conf_dir = "/usr/hdp/current/hadoop-client/conf"
   hadoop_bin_dir = format("/usr/hdp/current/hadoop-client/bin")
   daemon_script = format('/usr/hdp/current/hbase-client/bin/hbase-daemon.sh')
   region_mover = format('/usr/hdp/current/hbase-client/bin/region_mover.rb')
   region_drainer = format('/usr/hdp/current/hbase-client/bin/draining_servers.rb')
   hbase_cmd = format('/usr/hdp/current/hbase-client/bin/hbase')
-else:
-  hadoop_bin_dir = "/usr/bin"
-  daemon_script = "/usr/lib/hbase/bin/hbase-daemon.sh"
-  region_mover = "/usr/lib/hbase/bin/region_mover.rb"
-  region_drainer = "/usr/lib/hbase/bin/draining_servers.rb"
-  hbase_cmd = "/usr/lib/hbase/bin/hbase"
+
+  hbase_max_direct_memory_size = config['configurations']['hbase-env']['hbase_max_direct_memory_size']
+
+  daemon_script = format("/usr/hdp/current/{component_directory}/bin/hbase-daemon.sh")
+  region_mover = format("/usr/hdp/current/{component_directory}/bin/region_mover.rb")
+  region_drainer = format("/usr/hdp/current/{component_directory}/bin/draining_servers.rb")
+  hbase_cmd = format("/usr/hdp/current/{component_directory}/bin/hbase")
+
+
+hbase_conf_dir = status_params.hbase_conf_dir
 
 # no symlink for phoenix-server at this point
 phx_daemon_script = '/usr/hdp/current/phoenix-server/bin/queryserver.py'
 
-hadoop_conf_dir = "/etc/hadoop/conf"
-hbase_conf_dir_prefix = "/etc/hbase"
-hbase_conf_dir = format("{hbase_conf_dir_prefix}/conf")
 hbase_excluded_hosts = config['commandParams']['excluded_hosts']
 hbase_drain_only = default("/commandParams/mark_draining_only",False)
 hbase_included_hosts = config['commandParams']['included_hosts']
@@ -82,8 +100,6 @@ regionserver_xmn_max = config['configurations']['hbase-env']['hbase_regionserver
 regionserver_xmn_percent = config['configurations']['hbase-env']['hbase_regionserver_xmn_ratio']
 regionserver_xmn_size = calc_xmn_from_xms(regionserver_heapsize, regionserver_xmn_percent, regionserver_xmn_max)
 
-if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0:
-  hbase_max_direct_memory_size  = config['configurations']['hbase-env']['hbase_max_direct_memory_size']
 
 pid_dir = status_params.pid_dir
 tmp_dir = config['configurations']['hbase-site']['hbase.tmp.dir']
@@ -117,7 +133,7 @@ else:
 smoke_test_user = config['configurations']['cluster-env']['smokeuser']
 smokeuser_principal =  config['configurations']['cluster-env']['smokeuser_principal_name']
 smokeuser_permissions = "RWXCA"
-service_check_data = functions.get_unique_id_and_date()
+service_check_data = get_unique_id_and_date()
 user_group = config['configurations']['cluster-env']["user_group"]
 
 if security_enabled:
@@ -131,7 +147,7 @@ regionserver_keytab_path = config['configurations']['hbase-site']['hbase.regions
 queryserver_keytab_path = config['configurations']['hbase-site']['phoenix.queryserver.keytab.file']
 smoke_user_keytab = config['configurations']['cluster-env']['smokeuser_keytab']
 hbase_user_keytab = config['configurations']['hbase-env']['hbase_user_keytab']
-kinit_path_local = functions.get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
+kinit_path_local = get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
 if security_enabled:
   kinit_cmd = format("{kinit_path_local} -kt {hbase_user_keytab} {hbase_principal_name};")
 else:
@@ -165,16 +181,6 @@ HdfsDirectory = functools.partial(
   bin_dir = hadoop_bin_dir
 )
 
-if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0:
-  command_role = default("/role", "")
-  if command_role == "HBASE_MASTER" or command_role == "HBASE_REGIONSERVER":
-    role_root = "master" if command_role == "HBASE_MASTER" else "regionserver"
-
-    daemon_script=format("/usr/hdp/current/hbase-{role_root}/bin/hbase-daemon.sh")
-    region_mover = format("/usr/hdp/current/hbase-{role_root}/bin/region_mover.rb")
-    region_drainer = format("/usr/hdp/current/hbase-{role_root}/bin/draining_servers.rb")
-    hbase_cmd = format("/usr/hdp/current/hbase-{role_root}/bin/hbase")
-
 # ranger host
 ranger_admin_hosts = default("/clusterHostInfo/ranger_admin_hosts", [])
 has_ranger_admin = not len(ranger_admin_hosts) == 0    
@@ -218,7 +224,7 @@ if has_ranger_admin:
   elif xa_audit_db_flavor.lower() == 'oracle':
     jdbc_jar_name = "ojdbc6.jar"
     jdbc_symlink_name = "oracle-jdbc-driver.jar"
-  elif nxa_audit_db_flavor.lower() == 'postgres':
+  elif xa_audit_db_flavor.lower() == 'postgres':
     jdbc_jar_name = "postgresql.jar"
     jdbc_symlink_name = "postgres-jdbc-driver.jar"
   elif xa_audit_db_flavor.lower() == 'sqlserver':

http://git-wip-us.apache.org/repos/asf/ambari/blob/03918cf3/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/status_params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/status_params.py b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/status_params.py
index 4ec91a4..084ee06 100644
--- a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/status_params.py
+++ b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/status_params.py
@@ -17,9 +17,22 @@ See the License for the specific language governing permissions and
 limitations under the License.
 
 """
+from ambari_commons.os_check import OSCheck
 
-from resource_management import *
-from ambari_commons import OSCheck
+from resource_management.libraries.functions import format
+from resource_management.libraries.functions.default import default
+from resource_management.libraries.functions import get_kinit_path
+from resource_management.libraries.script.script import Script
+
+# a map of the Ambari role to the component name
+# for use with /usr/hdp/current/<component>
+SERVER_ROLE_DIRECTORY_MAP = {
+  'HBASE_MASTER' : 'hbase-master',
+  'HBASE_REGIONSERVER' : 'hbase-regionserver',
+  'HBASE_CLIENT' : 'hbase-client'
+}
+
+component_directory = Script.get_component_from_role(SERVER_ROLE_DIRECTORY_MAP, "HBASE_CLIENT")
 
 config = Script.get_config()
 
@@ -33,9 +46,9 @@ else:
   # Security related/required params
   hostname = config['hostname']
   security_enabled = config['configurations']['cluster-env']['security_enabled']
-  kinit_path_local = functions.get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
+  kinit_path_local = get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
   tmp_dir = Script.get_tmp_dir()
 
-
-  hbase_conf_dir_prefix = "/etc/hbase"
-  hbase_conf_dir = format("{hbase_conf_dir_prefix}/conf")
+  hbase_conf_dir = "/etc/hbase/conf"
+  if Script.is_hdp_stack_greater_or_equal("2.2"):
+    hbase_conf_dir = format("/usr/hdp/current/{component_directory}/conf")
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/03918cf3/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py
index c492f60..d8d99d5 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py
@@ -17,23 +17,29 @@ limitations under the License.
 
 """
 
-from resource_management.libraries.functions.version import format_hdp_stack_version, compare_versions
-from ambari_commons.os_check import OSCheck
-from resource_management.libraries.functions.default import default
-from resource_management import *
 import status_params
 import utils
 import json
 import os
-import itertools
 import re
 
+from ambari_commons.os_check import OSCheck
+
+from resource_management.libraries.functions import format
+from resource_management.libraries.functions.version import format_hdp_stack_version
+from resource_management.libraries.functions.default import default
+from resource_management.libraries.functions import get_klist_path
+from resource_management.libraries.functions import get_kinit_path
+from resource_management.libraries.script.script import Script
+from resource_management.libraries.resources.hdfs_directory import HdfsDirectory
+from resource_management.libraries.functions.format_jvm_option import format_jvm_option
+from resource_management.libraries.functions.get_lzo_packages import get_lzo_packages
+
 config = Script.get_config()
 tmp_dir = Script.get_tmp_dir()
 
 stack_name = default("/hostLevelParams/stack_name", None)
 upgrade_direction = default("/commandParams/upgrade_direction", None)
-
 stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
 hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)
 
@@ -53,13 +59,27 @@ dfs_http_policy = default('/configurations/hdfs-site/dfs.http.policy', None)
 dfs_dn_ipc_address = config['configurations']['hdfs-site']['dfs.datanode.ipc.address']
 secure_dn_ports_are_in_use = False
 
-#hadoop params
-if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0:
+# hadoop default parameters
+mapreduce_libs_path = "/usr/lib/hadoop-mapreduce/*"
+hadoop_libexec_dir = "/usr/lib/hadoop/libexec"
+hadoop_bin = "/usr/lib/hadoop/sbin"
+hadoop_bin_dir = "/usr/bin"
+hadoop_home = "/usr/lib/hadoop"
+hadoop_secure_dn_user = hdfs_user
+hadoop_conf_dir = "/etc/hadoop/conf"
+
+# hadoop parameters for 2.2+
+if Script.is_hdp_stack_greater_or_equal("2.2"):
   mapreduce_libs_path = "/usr/hdp/current/hadoop-mapreduce-client/*"
   hadoop_libexec_dir = "/usr/hdp/current/hadoop-client/libexec"
   hadoop_bin = "/usr/hdp/current/hadoop-client/sbin"
   hadoop_bin_dir = "/usr/hdp/current/hadoop-client/bin"
   hadoop_home = "/usr/hdp/current/hadoop-client"
+
+  # the configuration directory for HDFS/YARN/MapReduce is the hadoop config
+  # directory, which is symlinked only by the hadoop-client component
+  hadoop_conf_dir = "/usr/hdp/current/hadoop-client/conf"
+
   if not security_enabled:
     hadoop_secure_dn_user = '""'
   else:
@@ -77,18 +97,17 @@ if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0:
       hadoop_secure_dn_user = hdfs_user
     else:
       hadoop_secure_dn_user = '""'
-else:
-  mapreduce_libs_path = "/usr/lib/hadoop-mapreduce/*"
-  hadoop_libexec_dir = "/usr/lib/hadoop/libexec"
-  hadoop_bin = "/usr/lib/hadoop/sbin"
-  hadoop_bin_dir = "/usr/bin"
-  hadoop_home = "/usr/lib/hadoop"
-  hadoop_secure_dn_user = hdfs_user
 
-hadoop_conf_dir = "/etc/hadoop/conf"
-hadoop_conf_empty_dir = "/etc/hadoop/conf.empty"
+
+
 limits_conf_dir = "/etc/security/limits.d"
 
+if Script.is_hdp_stack_greater_or_equal("2.0") and Script.is_hdp_stack_less_than("2.1") and not OSCheck.is_suse_family():
+  # deprecated rhel jsvc_path
+  jsvc_path = "/usr/libexec/bigtop-utils"
+else:
+  jsvc_path = "/usr/lib/bigtop-utils"
+
 execute_path = os.environ['PATH'] + os.pathsep + hadoop_bin_dir
 ulimit_cmd = "ulimit -c unlimited ; "
 
@@ -102,8 +121,8 @@ hdfs_exclude_file = default("/clusterHostInfo/decom_dn_hosts", [])
 exclude_file_path = config['configurations']['hdfs-site']['dfs.hosts.exclude']
 update_exclude_file_only = default("/commandParams/update_exclude_file_only",False)
 
-klist_path_local = functions.get_klist_path(default('/configurations/kerberos-env/executable_search_paths', None))
-kinit_path_local = functions.get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
+klist_path_local = get_klist_path(default('/configurations/kerberos-env/executable_search_paths', None))
+kinit_path_local = get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
 #hosts
 hostname = config["hostname"]
 rm_host = default("/clusterHostInfo/rm_host", [])
@@ -298,12 +317,6 @@ hadoop_env_sh_template = config['configurations']['hadoop-env']['content']
 java_home = config['hostLevelParams']['java_home']
 java_version = int(config['hostLevelParams']['java_version'])
 
-if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.0') >= 0 and compare_versions(hdp_stack_version, '2.1') < 0 and not OSCheck.is_suse_family():
-  # deprecated rhel jsvc_path
-  jsvc_path = "/usr/libexec/bigtop-utils"
-else:
-  jsvc_path = "/usr/lib/bigtop-utils"
-
 hadoop_heapsize = config['configurations']['hadoop-env']['hadoop_heapsize']
 namenode_heapsize = config['configurations']['hadoop-env']['namenode_heapsize']
 namenode_opt_newsize = config['configurations']['hadoop-env']['namenode_opt_newsize']
@@ -366,7 +379,7 @@ if has_ranger_admin:
   elif xa_audit_db_flavor.lower() == 'oracle':
     jdbc_jar_name = "ojdbc6.jar"
     jdbc_symlink_name = "oracle-jdbc-driver.jar"
-  elif nxa_audit_db_flavor.lower() == 'postgres':
+  elif xa_audit_db_flavor.lower() == 'postgres':
     jdbc_jar_name = "postgresql.jar"
     jdbc_symlink_name = "postgres-jdbc-driver.jar"
   elif xa_audit_db_flavor.lower() == 'sqlserver':

http://git-wip-us.apache.org/repos/asf/ambari/blob/03918cf3/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/status_params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/status_params.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/status_params.py
index 3e4cfe4..7918eb5 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/status_params.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/status_params.py
@@ -17,9 +17,13 @@ limitations under the License.
 
 """
 
-from resource_management import *
 from ambari_commons import OSCheck
 
+from resource_management.libraries.functions import format
+from resource_management.libraries.functions.default import default
+from resource_management.libraries.functions import get_kinit_path
+from resource_management.libraries.script.script import Script
+
 config = Script.get_config()
 
 if OSCheck.is_windows_family():
@@ -44,6 +48,13 @@ else:
   security_enabled = config['configurations']['cluster-env']['security_enabled']
   hdfs_user_principal = config['configurations']['hadoop-env']['hdfs_principal_name']
   hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
+
   hadoop_conf_dir = "/etc/hadoop/conf"
-  kinit_path_local = functions.get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
+  if Script.is_hdp_stack_greater_or_equal("2.2"):
+    # the configuration directory for HDFS/YARN/MapReduce is the hadoop config
+    # directory, which is symlinked only by the hadoop-client component
+    hadoop_conf_dir = "/usr/hdp/current/hadoop-client/conf"
+
+
+  kinit_path_local = get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
   tmp_dir = Script.get_tmp_dir()

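The is_hdp_stack_greater_or_equal() and is_hdp_stack_less_than() checks used throughout these files depend on format_hdp_stack_version() normalizing hostLevelParams/stack_version to the #.#.#.# form and on compare_versions() comparing the parts numerically. A behavioral sketch (standard library only, not the library code):

  def normalize_stack_version(version):
    # pad to four dot-separated integers, e.g. "2.2" -> "2.2.0.0"
    parts = str(version).split('.')
    return '.'.join((parts + ['0', '0', '0', '0'])[:4])

  def compare_versions_sketch(v1, v2):
    # piecewise numeric comparison; returns -1, 0 or 1
    t1 = [int(x) for x in v1.split('.')]
    t2 = [int(x) for x in v2.split('.')]
    length = max(len(t1), len(t2))
    t1 += [0] * (length - len(t1))
    t2 += [0] * (length - len(t2))
    return (t1 > t2) - (t1 < t2)

  # "2.0.6" is less than "2.2", so the 2.2+ config paths are not used
  assert compare_versions_sketch(normalize_stack_version("2.0.6"), "2.2") < 0
  assert compare_versions_sketch(normalize_stack_version("2.3"), "2.2") >= 0
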
http://git-wip-us.apache.org/repos/asf/ambari/blob/03918cf3/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py
index 19852f5..bfd4e74 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py
@@ -94,7 +94,7 @@ def hive(name=None):
     setup_custom_scratchdir()
     params.HdfsDirectory(None, action="create")
 
-  Directory(params.hive_conf_dir_prefix,
+  Directory(params.hive_etc_dir_prefix,
             mode=0755
   )
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/03918cf3/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
index ee79a34..38bdced 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
@@ -18,15 +18,21 @@ limitations under the License.
 
 """
 
-from ambari_commons.constants import AMBARI_SUDO_BINARY
-from ambari_commons.os_check import OSCheck
-from resource_management.libraries.functions.version import format_hdp_stack_version, compare_versions
-from resource_management.libraries.functions.default import default
-from resource_management import *
 import status_params
 import json
 import os
 
+from ambari_commons.constants import AMBARI_SUDO_BINARY
+from ambari_commons.os_check import OSCheck
+
+from resource_management.libraries.functions.default import default
+from resource_management.libraries.functions.format import format
+from resource_management.libraries.functions.version import format_hdp_stack_version
+from resource_management.libraries.functions import get_kinit_path
+from resource_management.libraries.script.script import Script
+from resource_management.libraries.functions.get_port_from_url import get_port_from_url
+from resource_management.libraries.resources.hdfs_directory import HdfsDirectory
+
 # server configurations
 config = Script.get_config()
 tmp_dir = Script.get_tmp_dir()
@@ -40,76 +46,61 @@ hostname = config["hostname"]
 # This is expected to be of the form #.#.#.#
 stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
 hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)
-stack_is_hdp21 = hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.1') >= 0 and compare_versions(hdp_stack_version, '2.2') < 0
+stack_is_hdp21 = Script.is_hdp_stack_greater_or_equal("2.1") and Script.is_hdp_stack_less_than("2.2")
 
 # New Cluster Stack Version that is defined during the RESTART of a Rolling Upgrade
 version = default("/commandParams/version", None)
 
-# Hadoop params
-# TODO, this logic should initialize these parameters in a file inside the HDP 2.2 stack.
-if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >=0:
-  # start out with client libraries
-  hadoop_bin_dir = "/usr/hdp/current/hadoop-client/bin"
-  hadoop_home = '/usr/hdp/current/hadoop-client'
-  hive_bin = '/usr/hdp/current/hive-client/bin'
-  hive_lib = '/usr/hdp/current/hive-client/lib'
+hadoop_bin_dir = "/usr/bin"
+hadoop_home = '/usr'
+hadoop_streeming_jars = '/usr/lib/hadoop-mapreduce/hadoop-streaming-*.jar'
+hive_bin = '/usr/lib/hive/bin'
+hive_lib = '/usr/lib/hive/lib/'
+hive_var_lib = '/var/lib/hive'
+pig_tar_file = '/usr/share/HDP-webhcat/pig.tar.gz'
+hive_tar_file = '/usr/share/HDP-webhcat/hive.tar.gz'
+sqoop_tar_file = '/usr/share/HDP-webhcat/sqoop*.tar.gz'
+hive_specific_configs_supported = False
+hive_etc_dir_prefix = "/etc/hive"
+limits_conf_dir = "/etc/security/limits.d"
+hcat_conf_dir = '/etc/hcatalog/conf'
+config_dir = '/etc/hcatalog/conf'
+hcat_lib = '/usr/lib/hcatalog/share/hcatalog'
+webhcat_bin_dir = '/usr/lib/hcatalog/sbin'
+
+# use the directories from status_params as they are already calculated for
+# the correct version of HDP
+hadoop_conf_dir = status_params.hadoop_conf_dir
+webhcat_conf_dir = status_params.webhcat_conf_dir
+hive_conf_dir = status_params.hive_conf_dir
+hive_config_dir = status_params.hive_config_dir
+hive_client_conf_dir = status_params.hive_client_conf_dir
+hive_server_conf_dir = status_params.hive_server_conf_dir
 
-  # if this is a server action, then use the server binaries; smoke tests
-  # use the client binaries
-  command_role = default("/role", "")
-  server_role_dir_mapping = { 'HIVE_SERVER' : 'hive-server2',
-    'HIVE_METASTORE' : 'hive-metastore' }
+if Script.is_hdp_stack_greater_or_equal("2.1"):
+  hcat_conf_dir = '/etc/hive-hcatalog/conf'
+  config_dir = '/etc/hive-webhcat/conf'
+  hcat_lib = '/usr/lib/hive-hcatalog/share/hcatalog'
+  webhcat_bin_dir = '/usr/lib/hive-hcatalog/sbin'
 
-  if command_role in server_role_dir_mapping:
-    hive_server_root = server_role_dir_mapping[command_role]
-    hive_bin = format('/usr/hdp/current/{hive_server_root}/bin')
-    hive_lib = format('/usr/hdp/current/{hive_server_root}/lib')
+if Script.is_hdp_stack_greater_or_equal("2.2"):
+  hive_specific_configs_supported = True
+
+  component_directory = status_params.component_directory
+  hadoop_bin_dir = "/usr/hdp/current/hadoop-client/bin"
+  hadoop_home = '/usr/hdp/current/hadoop-client'
+  hive_bin = format('/usr/hdp/current/{component_directory}/bin')
+  hive_lib = format('/usr/hdp/current/{component_directory}/lib')
 
   # there are no client versions of these, use server versions directly
   hcat_lib = '/usr/hdp/current/hive-webhcat/share/hcatalog'
   webhcat_bin_dir = '/usr/hdp/current/hive-webhcat/sbin'
 
-  hive_specific_configs_supported = True
-else:
-  hadoop_bin_dir = "/usr/bin"
-  hadoop_home = '/usr'
-  hadoop_streeming_jars = '/usr/lib/hadoop-mapreduce/hadoop-streaming-*.jar'
-  hive_bin = '/usr/lib/hive/bin'
-  hive_lib = '/usr/lib/hive/lib/'
-  pig_tar_file = '/usr/share/HDP-webhcat/pig.tar.gz'
-  hive_tar_file = '/usr/share/HDP-webhcat/hive.tar.gz'
-  sqoop_tar_file = '/usr/share/HDP-webhcat/sqoop*.tar.gz'
-
-  if hdp_stack_version != "" and compare_versions(hdp_stack_version, "2.1.0.0") < 0:
-    hcat_lib = '/usr/lib/hcatalog/share/hcatalog'
-    webhcat_bin_dir = '/usr/lib/hcatalog/sbin'
-  # for newer versions
-  else:
-    hcat_lib = '/usr/lib/hive-hcatalog/share/hcatalog'
-    webhcat_bin_dir = '/usr/lib/hive-hcatalog/sbin'
-    
-  hive_specific_configs_supported = False
-
-hadoop_conf_dir = "/etc/hadoop/conf"
-hive_conf_dir_prefix = "/etc/hive"
-hive_conf_dir = format("{hive_conf_dir_prefix}/conf")
-hive_client_conf_dir = format("{hive_conf_dir_prefix}/conf")
-hive_server_conf_dir = format("{hive_conf_dir_prefix}/conf.server")
-limits_conf_dir = "/etc/security/limits.d"
-
-if hdp_stack_version != "" and compare_versions(hdp_stack_version, "2.1.0.0") < 0:
-  hcat_conf_dir = '/etc/hcatalog/conf'
-  config_dir = '/etc/hcatalog/conf'
-# for newer versions
-else:
-  hcat_conf_dir = '/etc/hive-hcatalog/conf'
-  config_dir = '/etc/hive-webhcat/conf'
 
 execute_path = os.environ['PATH'] + os.pathsep + hive_bin + os.pathsep + hadoop_bin_dir
 hive_metastore_user_name = config['configurations']['hive-site']['javax.jdo.option.ConnectionUserName']
 hive_jdbc_connection_url = config['configurations']['hive-site']['javax.jdo.option.ConnectionURL']
 
-webhcat_conf_dir = status_params.webhcat_conf_dir
 hive_metastore_user_passwd = config['configurations']['hive-site']['javax.jdo.option.ConnectionPassword']
 hive_metastore_db_type = config['configurations']['hive-env']['hive_database_type']
 #HACK Temporarily use dbType=azuredb while invoking schematool
@@ -145,15 +136,16 @@ templeton_port = config['configurations']['webhcat-site']['templeton.port']
 hive_metastore_hosts = config['clusterHostInfo']['hive_metastore_host']
 hive_metastore_host = hive_metastore_hosts[0]
 hive_metastore_port = get_port_from_url(config['configurations']['hive-site']['hive.metastore.uris']) #"9083"
-hive_var_lib = '/var/lib/hive'
 ambari_server_hostname = config['clusterHostInfo']['ambari_server_host'][0]
 hive_server_host = config['clusterHostInfo']['hive_server_host'][0]
 hive_server_hosts = config['clusterHostInfo']['hive_server_host']
 hive_transport_mode = config['configurations']['hive-site']['hive.server2.transport.mode']
+
 if hive_transport_mode.lower() == "http":
   hive_server_port = config['configurations']['hive-site']['hive.server2.thrift.http.port']
 else:
   hive_server_port = default('/configurations/hive-site/hive.server2.thrift.port',"10000")
+
 hive_url = format("jdbc:hive2://{hive_server_host}:{hive_server_port}")
 hive_server_principal = config['configurations']['hive-site']['hive.server2.authentication.kerberos.principal']
 hive_server2_authentication = config['configurations']['hive-site']['hive.server2.authentication']
@@ -167,7 +159,7 @@ smokeuser_principal = config['configurations']['cluster-env']['smokeuser_princip
 fs_root = config['configurations']['core-site']['fs.defaultFS']
 security_enabled = config['configurations']['cluster-env']['security_enabled']
 
-kinit_path_local = functions.get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
+kinit_path_local = get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
 hive_metastore_keytab_path =  config['configurations']['hive-site']['hive.metastore.kerberos.keytab.file']
 
 hive_server2_keytab = config['configurations']['hive-site']['hive.server2.authentication.kerberos.keytab']
@@ -177,17 +169,13 @@ hive_dbroot = config['configurations']['hive-env']['hive_dbroot']
 hive_log_dir = config['configurations']['hive-env']['hive_log_dir']
 hive_pid_dir = status_params.hive_pid_dir
 hive_pid = status_params.hive_pid
+
 #Default conf dir for client
 hive_conf_dirs_list = [hive_client_conf_dir]
 
 if hostname in hive_metastore_hosts or hostname in hive_server_hosts:
   hive_conf_dirs_list.append(hive_server_conf_dir)
 
-if 'role' in config and config['role'] in ["HIVE_SERVER", "HIVE_METASTORE"]:
-  hive_config_dir = hive_server_conf_dir
-else:
-  hive_config_dir = hive_client_conf_dir
-
 #hive-site
 hive_database_name = config['configurations']['hive-env']['hive_database_name']
 hive_database = config['configurations']['hive-env']['hive_database']
@@ -223,7 +211,6 @@ else:
 java64_home = config['hostLevelParams']['java_home']
 
 ##### MYSQL
-
 db_name = config['configurations']['hive-env']['hive_database_name']
 mysql_group = 'mysql'
 mysql_host = config['clusterHostInfo']['hive_mysql_host']
@@ -232,13 +219,12 @@ mysql_adduser_path = format("{tmp_dir}/addMysqlUser.sh")
 mysql_deluser_path = format("{tmp_dir}/removeMysqlUser.sh")
 
 ######## Metastore Schema
-if hdp_stack_version != "" and compare_versions(hdp_stack_version, "2.1.0.0") < 0:
-  init_metastore_schema = False
-else:
+init_metastore_schema = False
+if Script.is_hdp_stack_greater_or_equal("2.1"):
   init_metastore_schema = True
 
-########## HCAT
 
+########## HCAT
 hcat_dbroot = hcat_lib
 
 hcat_user = config['configurations']['hive-env']['hcat_user']
@@ -353,7 +339,7 @@ HdfsDirectory = functools.partial(
 # ranger host
 ranger_admin_hosts = default("/clusterHostInfo/ranger_admin_hosts", [])
 has_ranger_admin = not len(ranger_admin_hosts) == 0
-if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >=0:
+if Script.is_hdp_stack_greater_or_equal("2.2"):
   enable_ranger_hive = (config['configurations']['ranger-hive-plugin-properties']['ranger-hive-plugin-enabled'].lower() == 'yes')
 
 #ranger hive properties

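The net effect of the HIVE params rewrite is a layered-defaults pattern: the pre-2.1 paths are assigned first, then overwritten once for 2.1+ and again for 2.2+, so a single module body yields the right paths for any stack. A condensed illustration (hypothetical helper; the values are taken from the diff above):

  def resolve_hcat_dirs(stack):
    # pre-2.1 defaults
    hcat_conf_dir = '/etc/hcatalog/conf'
    hcat_lib = '/usr/lib/hcatalog/share/hcatalog'
    if stack >= (2, 1):
      hcat_conf_dir = '/etc/hive-hcatalog/conf'
      hcat_lib = '/usr/lib/hive-hcatalog/share/hcatalog'
    if stack >= (2, 2):
      # no client versions of these exist; the server versions are used directly
      hcat_lib = '/usr/hdp/current/hive-webhcat/share/hcatalog'
    return (hcat_conf_dir, hcat_lib)

  assert resolve_hcat_dirs((2, 0)) == ('/etc/hcatalog/conf', '/usr/lib/hcatalog/share/hcatalog')
  assert resolve_hcat_dirs((2, 3)) == ('/etc/hive-hcatalog/conf', '/usr/hdp/current/hive-webhcat/share/hcatalog')
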
http://git-wip-us.apache.org/repos/asf/ambari/blob/03918cf3/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/status_params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/status_params.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/status_params.py
index 5dcdbf5..ae960b7 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/status_params.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/status_params.py
@@ -18,8 +18,24 @@ limitations under the License.
 
 """
 
-from resource_management import *
-from ambari_commons.os_check import OSCheck
+from ambari_commons import OSCheck
+
+from resource_management.libraries.functions import format
+from resource_management.libraries.functions.default import default
+from resource_management.libraries.functions import get_kinit_path
+from resource_management.libraries.script.script import Script
+
+# a map of the Ambari role to the component name
+# for use with /usr/hdp/current/<component>
+SERVER_ROLE_DIRECTORY_MAP = {
+  'HIVE_METASTORE' : 'hive-metastore',
+  'HIVE_SERVER' : 'hive-server2',
+  'WEBHCAT_SERVER' : 'hive-webhcat',
+  'HIVE_CLIENT' : 'hive-client',
+  'HCAT' : 'hive-client'
+}
+
+component_directory = Script.get_component_from_role(SERVER_ROLE_DIRECTORY_MAP, "HIVE_CLIENT")
 
 config = Script.get_config()
 
@@ -46,11 +62,29 @@ else:
   # Security related/required params
   hostname = config['hostname']
   security_enabled = config['configurations']['cluster-env']['security_enabled']
-  hadoop_conf_dir = "/etc/hadoop/conf"
-  kinit_path_local = functions.get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
+  kinit_path_local = get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
   tmp_dir = Script.get_tmp_dir()
   hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
   hive_user = config['configurations']['hive-env']['hive_user']
-  hive_conf_dir = "/etc/hive/conf"
   webhcat_user = config['configurations']['hive-env']['webhcat_user']
-  webhcat_conf_dir = '/etc/hive-webhcat/conf'
\ No newline at end of file
+
+  # default configuration directories
+  hadoop_conf_dir = "/etc/hadoop/conf"
+  webhcat_conf_dir = '/etc/hive-webhcat/conf'
+  hive_etc_dir_prefix = "/etc/hive"
+  hive_conf_dir = "/etc/hive/conf"
+  hive_client_conf_dir = "/etc/hive/conf"
+  hive_server_conf_dir = "/etc/hive/conf.server"
+
+  # HDP 2.2+
+  if Script.is_hdp_stack_greater_or_equal("2.2"):
+    hadoop_conf_dir = "/usr/hdp/current/hadoop-client/conf"
+    webhcat_conf_dir = '/usr/hdp/current/hive-webhcat/conf'
+    hive_conf_dir = format("/usr/hdp/current/{component_directory}/conf")
+    hive_client_conf_dir = format("/usr/hdp/current/{component_directory}/conf")
+    hive_server_conf_dir = format("/usr/hdp/current/{component_directory}/conf/conf.server")
+
+
+  hive_config_dir = hive_client_conf_dir
+  if 'role' in config and config['role'] in ["HIVE_SERVER", "HIVE_METASTORE"]:
+    hive_config_dir = hive_server_conf_dir

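The SERVER_ROLE_DIRECTORY_MAP introduced above lets status_params.py resolve the correct /usr/hdp/current/<component> link for whichever role the agent command is running. A rough, self-contained sketch of the lookup -- the fallback behavior of the real Script.get_component_from_role is an assumption here:

    # Map copied from the patch; the lookup helper below is illustrative.
    SERVER_ROLE_DIRECTORY_MAP = {
      'HIVE_METASTORE' : 'hive-metastore',
      'HIVE_SERVER' : 'hive-server2',
      'WEBHCAT_SERVER' : 'hive-webhcat',
      'HIVE_CLIENT' : 'hive-client',
      'HCAT' : 'hive-client'
    }

    def get_component_from_role(role_directory_map, default_role, command_json):
        # fall back to the client directory when the command's role is
        # missing or not in the map (assumed behavior)
        role = command_json.get('role', default_role)
        return role_directory_map.get(role, role_directory_map[default_role])

    # e.g. a HIVE_SERVER command resolves to the hive-server2 conf directory
    component = get_component_from_role(
        SERVER_ROLE_DIRECTORY_MAP, 'HIVE_CLIENT', {'role': 'HIVE_SERVER'})
    hive_conf_dir = "/usr/hdp/current/%s/conf" % component
    assert hive_conf_dir == "/usr/hdp/current/hive-server2/conf"
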
http://git-wip-us.apache.org/repos/asf/ambari/blob/03918cf3/ambari-server/src/main/resources/common-services/KAFKA/0.8.1.2.2/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/KAFKA/0.8.1.2.2/package/scripts/params.py b/ambari-server/src/main/resources/common-services/KAFKA/0.8.1.2.2/package/scripts/params.py
index e9f8f56..fcb0092 100644
--- a/ambari-server/src/main/resources/common-services/KAFKA/0.8.1.2.2/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/KAFKA/0.8.1.2.2/package/scripts/params.py
@@ -17,10 +17,10 @@ See the License for the specific language governing permissions and
 limitations under the License.
 
 """
+from resource_management.libraries.functions import format
 from resource_management.libraries.script.script import Script
-from resource_management.libraries.functions.version import format_hdp_stack_version, compare_versions
+from resource_management.libraries.functions.version import format_hdp_stack_version
 from resource_management.libraries.functions.default import default
-from resource_management.core.logger import Logger
 
 import status_params
 
@@ -35,19 +35,23 @@ host_sys_prepped = default("/hostLevelParams/host_sys_prepped", False)
 stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
 hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)
 
-if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0:
-    kafka_home = '/usr/hdp/current/kafka-broker/'
-    kafka_bin = kafka_home+'bin/kafka'
-else:
-    kafka_home = '/usr/lib/kafka/'
-    kafka_bin = kafka_home+'/bin/kafka'
+# default kafka parameters
+kafka_home = '/usr/lib/kafka/'
+kafka_bin = kafka_home+'bin/kafka'
+conf_dir = "/etc/kafka/conf"
+
+# parameters for 2.2+
+if Script.is_hdp_stack_greater_or_equal("2.2"):
+  kafka_home = '/usr/hdp/current/kafka-broker/'
+  kafka_bin = kafka_home+'bin/kafka'
+  conf_dir = "/usr/hdp/current/kafka-broker/conf"
 
 
-conf_dir = "/etc/kafka/conf"
 kafka_user = config['configurations']['kafka-env']['kafka_user']
 kafka_log_dir = config['configurations']['kafka-env']['kafka_log_dir']
 kafka_pid_dir = status_params.kafka_pid_dir
 kafka_pid_file = kafka_pid_dir+"/kafka.pid"
+
 # This is hardcoded on the kafka bash process lifecycle on which we have no control over
 kafka_managed_pid_dir = "/var/run/kafka"
 kafka_managed_log_dir = "/var/log/kafka"

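The Kafka hunk shows the commit's second recurring shape: assign the pre-2.2 defaults unconditionally, then overwrite for HDP 2.2+ instead of branching with else. Since kafka_home carries a trailing slash, concatenating '/bin/kafka' onto it would yield a doubled slash; building the path with os.path.join sidesteps that class of mistake entirely. A sketch, not part of the patch:

    import os.path

    # default (pre-HDP-2.2) layout
    kafka_home = '/usr/lib/kafka'
    conf_dir = '/etc/kafka/conf'

    hdp_22_or_later = True  # stand-in for Script.is_hdp_stack_greater_or_equal("2.2")
    if hdp_22_or_later:
        kafka_home = '/usr/hdp/current/kafka-broker'
        conf_dir = '/usr/hdp/current/kafka-broker/conf'

    # join() inserts separators itself, so no doubled slash can appear
    # regardless of whether kafka_home is spelled with a trailing "/"
    kafka_bin = os.path.join(kafka_home, 'bin', 'kafka')
    assert kafka_bin == '/usr/hdp/current/kafka-broker/bin/kafka'
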
http://git-wip-us.apache.org/repos/asf/ambari/blob/03918cf3/ambari-server/src/main/resources/common-services/KAFKA/0.8.1.2.2/package/scripts/status_params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/KAFKA/0.8.1.2.2/package/scripts/status_params.py b/ambari-server/src/main/resources/common-services/KAFKA/0.8.1.2.2/package/scripts/status_params.py
index fcb0816..57bdf5e 100644
--- a/ambari-server/src/main/resources/common-services/KAFKA/0.8.1.2.2/package/scripts/status_params.py
+++ b/ambari-server/src/main/resources/common-services/KAFKA/0.8.1.2.2/package/scripts/status_params.py
@@ -17,8 +17,8 @@ See the License for the specific language governing permissions and
 limitations under the License.
 
 """
-
-from resource_management import *
+from resource_management.libraries.functions import format
+from resource_management.libraries.script.script import Script
 
 config = Script.get_config()
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/03918cf3/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/params.py b/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/params.py
index 3c3db5b..d2a7983 100644
--- a/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/params.py
@@ -18,13 +18,15 @@ limitations under the License.
 Ambari Agent
 
 """
-
-from resource_management.libraries.functions.version import format_hdp_stack_version, compare_versions
-from resource_management.libraries.functions.default import default
-from resource_management import *
 import status_params
-import json
+
 from ambari_commons import OSCheck
+from resource_management.libraries.functions import format
+from resource_management.libraries.functions.version import format_hdp_stack_version
+from resource_management.libraries.functions.default import default
+from resource_management.libraries.functions.get_port_from_url import get_port_from_url
+from resource_management.libraries.functions import get_kinit_path
+from resource_management.libraries.script.script import Script
 
 if OSCheck.is_windows_family():
   from params_windows import *
@@ -130,7 +132,7 @@ security_enabled = config['configurations']['cluster-env']['security_enabled']
 smokeuser = config['configurations']['cluster-env']['smokeuser']
 smokeuser_principal = config['configurations']['cluster-env']['smokeuser_principal_name']
 smoke_user_keytab = config['configurations']['cluster-env']['smokeuser_keytab']
-kinit_path_local = functions.get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
+kinit_path_local = get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
 if security_enabled:
   knox_keytab_path = config['configurations']['knox-env']['knox_keytab_path']
   _hostname_lowercase = config['hostname'].lower()
@@ -174,7 +176,7 @@ if has_ranger_admin:
   elif xa_audit_db_flavor.lower() == 'oracle':
     jdbc_jar_name = "ojdbc6.jar"
     jdbc_symlink_name = "oracle-jdbc-driver.jar"
-  elif nxa_audit_db_flavor.lower() == 'postgres':
+  elif xa_audit_db_flavor.lower() == 'postgres':
     jdbc_jar_name = "postgresql.jar"
     jdbc_symlink_name = "postgres-jdbc-driver.jar"
   elif xa_audit_db_flavor.lower() == 'sqlserver':

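Besides the import cleanup, this hunk fixes a latent typo: the postgres branch tested nxa_audit_db_flavor, a name that does not exist, so reaching that branch could only raise NameError. A table-driven mapping makes that kind of mistake harder to hide in an untested branch. Sketch only; entries for the flavors whose jar names are not shown in the hunk are elided:

    # jar names for oracle/postgres copied from the patch above
    JDBC_DRIVERS = {
        'oracle':   ('ojdbc6.jar',     'oracle-jdbc-driver.jar'),
        'postgres': ('postgresql.jar', 'postgres-jdbc-driver.jar'),
        # mysql and sqlserver entries elided; see the conditional above
    }

    def jdbc_names(xa_audit_db_flavor):
        # a KeyError on an unknown flavor surfaces the misconfiguration
        # immediately instead of leaving the jar variables undefined
        return JDBC_DRIVERS[xa_audit_db_flavor.lower()]

    jdbc_jar_name, jdbc_symlink_name = jdbc_names('Postgres')
    assert jdbc_jar_name == 'postgresql.jar'
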
http://git-wip-us.apache.org/repos/asf/ambari/blob/03918cf3/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/params_linux.py
index 701bcd4..1347760 100644
--- a/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/params_linux.py
@@ -16,11 +16,10 @@ See the License for the specific language governing permissions and
 limitations under the License.
 
 """
-
-from resource_management.libraries.functions.version import format_hdp_stack_version, compare_versions
+from resource_management.libraries.functions import format
+from resource_management.libraries.functions.version import format_hdp_stack_version
 from resource_management.libraries.functions.default import default
-from resource_management import *
-from ambari_commons import OSCheck
+from resource_management.libraries.script.script import Script
 
 # server configurations
 config = Script.get_config()
@@ -31,19 +30,23 @@ knox_cert_store_path = '/var/lib/knox/data/security/keystores/gateway.jks'
 knox_user = default("/configurations/knox-env/knox_user", "knox")
 stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
 hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)
-if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0:
+
+# default parameters
+knox_bin = '/usr/bin/gateway'
+knox_conf_dir = '/etc/knox/conf'
+ldap_bin = '/usr/lib/knox/bin/ldap.sh'
+knox_client_bin = '/usr/lib/knox/bin/knoxcli.sh'
+
+# HDP 2.2+ parameters
+if Script.is_hdp_stack_greater_or_equal("2.2"):
   knox_bin = '/usr/hdp/current/knox-server/bin/gateway.sh'
+  knox_conf_dir = '/usr/hdp/current/knox-server/conf'
   ldap_bin = '/usr/hdp/current/knox-server/bin/ldap.sh'
   knox_client_bin = '/usr/hdp/current/knox-server/bin/knoxcli.sh'
-else:
-  knox_bin = '/usr/bin/gateway'
-  ldap_bin = '/usr/lib/knox/bin/ldap.sh'
-  knox_client_bin = '/usr/lib/knox/bin/knoxcli.sh'
 
 knox_group = default("/configurations/knox-env/knox_group", "knox")
 mode = 0644
 
 # server configurations
-knox_conf_dir = '/etc/knox/conf'
 knox_data_dir = '/var/lib/knox/data'
 knox_logs_dir = '/var/log/knox'

http://git-wip-us.apache.org/repos/asf/ambari/blob/03918cf3/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/status_params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/status_params.py b/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/status_params.py
index 52e9c59..bdf4b04 100644
--- a/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/status_params.py
+++ b/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/status_params.py
@@ -17,12 +17,17 @@ See the License for the specific language governing permissions and
 limitations under the License.
 
 """
-
-from resource_management import *
+from resource_management.libraries.functions import format
+from resource_management.libraries.functions.default import default
+from resource_management.libraries.functions import get_kinit_path
+from resource_management.libraries.script.script import Script
 
 config = Script.get_config()
 
 knox_conf_dir = '/etc/knox/conf'
+if Script.is_hdp_stack_greater_or_equal("2.2"):
+  knox_conf_dir = '/usr/hdp/current/knox-server/conf'
+
 knox_pid_dir = config['configurations']['knox-env']['knox_pid_dir']
 knox_pid_file = format("{knox_pid_dir}/gateway.pid")
 ldap_pid_file = format("{knox_pid_dir}/ldap.pid")
@@ -34,7 +39,8 @@ if security_enabled:
 else:
     knox_keytab_path = None
     knox_principal_name = None
+
 hostname = config['hostname'].lower()
 knox_user = default("/configurations/knox-env/knox_user", "knox")
-kinit_path_local = functions.get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
+kinit_path_local = get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
 temp_dir = Script.get_tmp_dir()

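Nearly every file in this commit swaps functions.get_kinit_path(...) for a directly imported get_kinit_path, passing the optional kerberos-env executable_search_paths value. A rough imitation of what such a lookup can do, under the assumption that the search paths arrive as a comma-separated list; the real implementation may differ in its fallbacks:

    import os

    def get_kinit_path_sketch(executable_search_paths=None):
        # sketch: try the configured directories first, then PATH
        search_dirs = []
        if executable_search_paths:
            # comma-separated list of directories (assumption)
            search_dirs.extend(p.strip() for p in executable_search_paths.split(','))
        search_dirs.extend(os.environ.get('PATH', '').split(os.pathsep))
        for directory in search_dirs:
            candidate = os.path.join(directory, 'kinit')
            if os.path.isfile(candidate) and os.access(candidate, os.X_OK):
                return candidate
        return 'kinit'  # let the shell resolve it as a last resort

    kinit_path_local = get_kinit_path_sketch('/usr/bin,/usr/kerberos/bin')
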
http://git-wip-us.apache.org/repos/asf/ambari/blob/03918cf3/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/params.py b/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/params.py
index b37a3a8..5e0096f 100644
--- a/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/params.py
@@ -18,9 +18,12 @@ limitations under the License.
 Ambari Agent
 
 """
-
-from resource_management.libraries.functions.version import format_hdp_stack_version, compare_versions
-from resource_management import *
+from resource_management.libraries.functions import format
+from resource_management.libraries.functions.version import format_hdp_stack_version
+from resource_management.libraries.functions.default import default
+from resource_management.libraries.functions import get_kinit_path
+from resource_management.libraries.script.script import Script
+from resource_management.libraries.resources.hdfs_directory import HdfsDirectory
 
 # server configurations
 config = Script.get_config()
@@ -36,15 +39,18 @@ hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)
 version = default("/commandParams/version", None)
 
 #mahout params
-mahout_conf_dir = "/etc/mahout/conf"
 mahout_home = "/usr/hdp/current/mahout-client"
+mahout_conf_dir = "/usr/hdp/current/mahout-client/conf"
 mahout_user = config['configurations']['mahout-env']['mahout_user']
 
 #hadoop params
 hadoop_bin_dir = "/usr/hdp/current/hadoop-client/bin"
 hadoop_home = '/usr/hdp/current/hadoop-client'
 
-hadoop_conf_dir = "/etc/hadoop/conf"
+# the configuration directory for HDFS/YARN/MapReduce is the hadoop config
+# directory, which is symlinked by hadoop-client only
+hadoop_conf_dir = "/usr/hdp/current/hadoop-client/conf"
+
 hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
 hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
 hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
@@ -53,7 +59,7 @@ smokeuser_principal = config['configurations']['cluster-env']['smokeuser_princip
 user_group = config['configurations']['cluster-env']['user_group']
 security_enabled = config['configurations']['cluster-env']['security_enabled']
 smoke_user_keytab = config['configurations']['cluster-env']['smokeuser_keytab']
-kinit_path_local = functions.get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
+kinit_path_local = get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
 
 # not supporting 32 bit jdk.
 java64_home = config['hostLevelParams']['java_home']

http://git-wip-us.apache.org/repos/asf/ambari/blob/03918cf3/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params_linux.py
index 4be8a50..fb213b1 100644
--- a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params_linux.py
@@ -17,21 +17,17 @@ See the License for the specific language governing permissions and
 limitations under the License.
 
 """
-
 from ambari_commons.constants import AMBARI_SUDO_BINARY
-from resource_management import *
-from resource_management.core import System
-from resource_management.libraries import Script
-from resource_management.libraries.functions import default
-from resource_management.libraries.functions import get_kinit_path
-from resource_management.libraries.functions import get_port_from_url
 from resource_management.libraries.functions import format
 from resource_management.libraries.functions.version import format_hdp_stack_version
-from resource_management.libraries.functions.version import compare_versions
-from resource_management.libraries.resources import HdfsDirectory
+from resource_management.libraries.functions.default import default
+from resource_management.libraries.functions import get_kinit_path
+from resource_management.libraries.functions import get_port_from_url
+from resource_management.libraries.script.script import Script
+from resource_management.libraries.resources.hdfs_directory import HdfsDirectory
+from resource_management.libraries.functions.get_lzo_packages import get_lzo_packages
 
 import status_params
-import itertools
 import os
 
 # server configurations
@@ -50,21 +46,13 @@ stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
 hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)
 
 #hadoop params
-if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0:
+if Script.is_hdp_stack_greater_or_equal("2.2"):
   # start out assuming client libraries
   hadoop_bin_dir = "/usr/hdp/current/hadoop-client/bin"
   hadoop_lib_home = "/usr/hdp/current/hadoop-client/lib"
 
-  # if this is a server action, then use the server binaries; smoke tests
-  # use the client binaries
-  server_role_dir_mapping = { 'OOZIE_SERVER' : 'oozie-server',
-                              'OOZIE_SERVICE_CHECK' : 'oozie-client' }
-
-  command_role = default("/role", "")
-  if command_role not in server_role_dir_mapping:
-    command_role = 'OOZIE_SERVICE_CHECK'
-
-  oozie_root = server_role_dir_mapping[command_role]
+  # oozie-server or oozie-client, depending on role
+  oozie_root = status_params.component_directory
 
   # using the correct oozie root dir, format the correct location
   oozie_lib_dir = format("/usr/hdp/current/{oozie_root}")
@@ -78,6 +66,13 @@ if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0:
   oozie_home = format("/usr/hdp/current/{oozie_root}")
   oozie_bin_dir = format("/usr/hdp/current/{oozie_root}/bin")
   falcon_home = '/usr/hdp/current/falcon-client'
+
+  conf_dir = format("/usr/hdp/current/{oozie_root}/conf")
+  hive_conf_dir = format("{conf_dir}/action-conf/hive")
+
+  # the configuration directory for HDFS/YARN/MapReduce is the hadoop config
+  # directory, which is symlinked by hadoop-client only
+  hadoop_conf_dir = "/usr/hdp/current/hadoop-client/conf"
 else:
   hadoop_bin_dir = "/usr/bin"
   hadoop_lib_home = "/usr/lib/hadoop/lib"
@@ -91,12 +86,12 @@ else:
   oozie_home = "/usr/lib/oozie"
   oozie_bin_dir = "/usr/bin"
   falcon_home = '/usr/lib/falcon'
+  hadoop_conf_dir = "/etc/hadoop/conf"
+  conf_dir = "/etc/oozie/conf"
+  hive_conf_dir = "/etc/oozie/conf/action-conf/hive"
 
 execute_path = oozie_bin_dir + os.pathsep + hadoop_bin_dir
 
-hadoop_conf_dir = "/etc/hadoop/conf"
-conf_dir = "/etc/oozie/conf"
-hive_conf_dir = "/etc/oozie/conf/action-conf/hive"
 oozie_user = config['configurations']['oozie-env']['oozie_user']
 smokeuser = config['configurations']['cluster-env']['smokeuser']
 smokeuser_principal = config['configurations']['cluster-env']['smokeuser_principal_name']
@@ -122,7 +117,8 @@ oozie_service_keytab = config['configurations']['oozie-site']['oozie.service.Had
 oozie_principal = config['configurations']['oozie-site']['oozie.service.HadoopAccessorService.kerberos.principal']
 http_principal = config['configurations']['oozie-site']['oozie.authentication.kerberos.principal']
 oozie_site = config['configurations']['oozie-site']
-if security_enabled and hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') < 0:
+
+if security_enabled and Script.is_hdp_stack_less_than("2.2"):
   #older versions of oozie have problems when using _HOST in principal
   oozie_site = dict(config['configurations']['oozie-site'])
   oozie_site['oozie.service.HadoopAccessorService.kerberos.principal'] = \
@@ -147,7 +143,7 @@ oozie_server_port = get_port_from_url(config['configurations']['oozie-site']['oo
 oozie_server_admin_port = config['configurations']['oozie-env']['oozie_admin_port']
 fs_root = config['configurations']['core-site']['fs.defaultFS']
 
-if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.0') >= 0 and compare_versions(hdp_stack_version, '2.2') < 0:
+if Script.is_hdp_stack_greater_or_equal("2.0") and Script.is_hdp_stack_less_than("2.2"):
   put_shared_lib_to_hdfs_cmd = format("hadoop --config {hadoop_conf_dir} dfs -put {oozie_shared_lib} {oozie_hdfs_user_dir}")
 # for newer
 else:

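The conf_dir assignments above lean on resource_management's format(), which resolves {oozie_root}-style placeholders from the caller's own variables rather than from explicit arguments. A reduced imitation using only the standard library -- the helper name is hypothetical, and the real function is more involved (it also consults configuration); this only mirrors the caller-locals lookup:

    import inspect

    def format_from_caller(template):
        # resolve {name} placeholders from the calling frame, loosely
        # imitating resource_management.libraries.functions.format
        caller = inspect.currentframe().f_back
        namespace = dict(caller.f_globals)
        namespace.update(caller.f_locals)
        return template.format(**namespace)

    oozie_root = 'oozie-server'  # e.g. from status_params.component_directory
    conf_dir = format_from_caller("/usr/hdp/current/{oozie_root}/conf")
    assert conf_dir == "/usr/hdp/current/oozie-server/conf"
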
http://git-wip-us.apache.org/repos/asf/ambari/blob/03918cf3/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/status_params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/status_params.py b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/status_params.py
index e318c77..8457e5d 100644
--- a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/status_params.py
+++ b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/status_params.py
@@ -18,8 +18,21 @@ limitations under the License.
 
 """
 
-from resource_management import *
-from ambari_commons import OSCheck
+from ambari_commons.os_check import OSCheck
+from resource_management.libraries.functions import format
+from resource_management.libraries.functions.default import default
+from resource_management.libraries.functions import get_kinit_path
+from resource_management.libraries.script.script import Script
+
+# a map of the Ambari role to the component name
+# for use with /usr/hdp/current/<component>
+SERVER_ROLE_DIRECTORY_MAP = {
+  'OOZIE_SERVER' : 'oozie-server',
+  'OOZIE_CLIENT' : 'oozie-client',
+  'OOZIE_SERVICE_CHECK' : 'oozie-client'
+}
+
+component_directory = Script.get_component_from_role(SERVER_ROLE_DIRECTORY_MAP, "OOZIE_CLIENT")
 
 config = Script.get_config()
 
@@ -31,8 +44,12 @@ else:
   pid_file = format("{oozie_pid_dir}/oozie.pid")
 
   security_enabled = config['configurations']['cluster-env']['security_enabled']
-  kinit_path_local = functions.get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
+  kinit_path_local = get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
+
   conf_dir = "/etc/oozie/conf"
+  if Script.is_hdp_stack_greater_or_equal("2.2"):
+    conf_dir = format("/usr/hdp/current/{component_directory}/conf")
+
   tmp_dir = Script.get_tmp_dir()
   oozie_user = config['configurations']['oozie-env']['oozie_user']
   hostname = config["hostname"]

http://git-wip-us.apache.org/repos/asf/ambari/blob/03918cf3/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/params_linux.py
index af3e18f..c4fb033 100644
--- a/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/params_linux.py
@@ -18,9 +18,12 @@ limitations under the License.
 Ambari Agent
 
 """
-
-from resource_management.libraries.functions.version import format_hdp_stack_version, compare_versions
-from resource_management import *
+from resource_management.libraries.functions import format
+from resource_management.libraries.functions.version import format_hdp_stack_version
+from resource_management.libraries.functions.default import default
+from resource_management.libraries.functions import get_kinit_path
+from resource_management.libraries.script.script import Script
+from resource_management.libraries.resources.hdfs_directory import HdfsDirectory
 
 # server configurations
 config = Script.get_config()
@@ -34,18 +37,21 @@ hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)
 # New Cluster Stack Version that is defined during the RESTART of a Rolling Upgrade
 version = default("/commandParams/version", None)
 
-#hadoop params
-if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0:
+# hadoop default parameters
+pig_conf_dir = "/etc/pig/conf"
+hadoop_conf_dir = "/etc/hadoop/conf"
+hadoop_bin_dir = "/usr/bin"
+hadoop_home = '/usr'
+pig_bin_dir = ""
+
+# hadoop parameters for 2.2+
+if Script.is_hdp_stack_greater_or_equal("2.2"):
+  pig_conf_dir = "/usr/hdp/current/pig-client/conf"
+  hadoop_conf_dir = "/usr/hdp/current/hadoop-client/conf"
   hadoop_bin_dir = "/usr/hdp/current/hadoop-client/bin"
   hadoop_home = '/usr/hdp/current/hadoop-client'
   pig_bin_dir = '/usr/hdp/current/pig-client/bin'
-else:
-  hadoop_bin_dir = "/usr/bin"
-  hadoop_home = '/usr'
-  pig_bin_dir = ""
 
-hadoop_conf_dir = "/etc/hadoop/conf"
-pig_conf_dir = "/etc/pig/conf"
 hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
 hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
 hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
@@ -54,7 +60,7 @@ smokeuser_principal = config['configurations']['cluster-env']['smokeuser_princip
 user_group = config['configurations']['cluster-env']['user_group']
 security_enabled = config['configurations']['cluster-env']['security_enabled']
 smoke_user_keytab = config['configurations']['cluster-env']['smokeuser_keytab']
-kinit_path_local = functions.get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
+kinit_path_local = get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
 pig_env_sh_template = config['configurations']['pig-env']['content']
 
 # not supporting 32 bit jdk.

http://git-wip-us.apache.org/repos/asf/ambari/blob/03918cf3/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/params.py
index e2b8ba8..befad8d 100644
--- a/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/params.py
@@ -17,11 +17,21 @@ See the License for the specific language governing permissions and
 limitations under the License.
 
 """
+from resource_management.libraries.functions import format
 from resource_management.libraries.script import Script
-from resource_management.libraries.functions.version import format_hdp_stack_version, compare_versions
+from resource_management.libraries.functions.version import format_hdp_stack_version
 from resource_management.libraries.functions.format import format
 from resource_management.libraries.functions.default import default
 
+# a map of the Ambari role to the component name
+# for use with /usr/hdp/current/<component>
+SERVER_ROLE_DIRECTORY_MAP = {
+  'RANGER_ADMIN' : 'ranger-admin',
+  'RANGER_USERSYNC' : 'ranger-usersync'
+}
+
+component_directory = Script.get_component_from_role(SERVER_ROLE_DIRECTORY_MAP, "RANGER_ADMIN")
+
 config  = Script.get_config()
 tmp_dir = Script.get_tmp_dir()
 
@@ -32,12 +42,12 @@ host_sys_prepped = default("/hostLevelParams/host_sys_prepped", False)
 stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
 hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)
 
-stack_is_hdp22_or_further = hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0
-stack_is_hdp23_or_further = hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.3') >= 0
+stack_is_hdp22_or_further = Script.is_hdp_stack_greater_or_equal("2.2")
+stack_is_hdp23_or_further = Script.is_hdp_stack_greater_or_equal("2.3")
 
 if stack_is_hdp22_or_further:
   ranger_home    = '/usr/hdp/current/ranger-admin'
-  ranger_conf    = '/etc/ranger/admin/conf'
+  ranger_conf    = '/usr/hdp/current/ranger-admin/conf'
   ranger_stop    = '/usr/bin/ranger-admin-stop'
   ranger_start   = '/usr/bin/ranger-admin-start'
   usersync_home  = '/usr/hdp/current/ranger-usersync'

http://git-wip-us.apache.org/repos/asf/ambari/blob/03918cf3/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/params.py b/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/params.py
index 409610a..7a6d1eb 100644
--- a/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/params.py
@@ -18,7 +18,7 @@ limitations under the License.
 
 """
 from resource_management.libraries.script import Script
-from resource_management.libraries.functions.version import format_hdp_stack_version, compare_versions
+from resource_management.libraries.functions.version import format_hdp_stack_version
 from resource_management.libraries.functions.format import format
 from resource_management.libraries.functions.default import default
 
@@ -31,7 +31,7 @@ version = default("/commandParams/version", None)
 stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
 hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)
 
-stack_is_hdp23_or_further = hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.3') >= 0
+stack_is_hdp23_or_further = Script.is_hdp_stack_greater_or_equal("2.3")
 
 if stack_is_hdp23_or_further:
   kms_home = '/usr/hdp/current/ranger-kms'