Posted to commits@ambari.apache.org by jo...@apache.org on 2017/07/31 14:10:32 UTC

[17/25] ambari git commit: AMBARI-21570. Migrate custom extension support.(vbrodetskyi)

AMBARI-21570. Migrate custom extension support.(vbrodetskyi)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/0a78ceab
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/0a78ceab
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/0a78ceab

Branch: refs/heads/branch-feature-AMBARI-21450
Commit: 0a78ceab8922cde53d7fc9b5a477bbad8e870a81
Parents: 9f6a5a6
Author: Vitaly Brodetskyi <vb...@hortonworks.com>
Authored: Fri Jul 28 14:40:59 2017 +0300
Committer: Vitaly Brodetskyi <vb...@hortonworks.com>
Committed: Fri Jul 28 14:40:59 2017 +0300

----------------------------------------------------------------------
 .../libraries/functions/constants.py            |   3 +-
 .../libraries/functions/version.py              |  17 +-
 .../0.1.0/package/scripts/params_linux.py       |   6 +
 .../0.96.0.2.0/package/scripts/params_linux.py  |   2 +
 .../2.1.0.2.0/package/scripts/params_linux.py   |   3 +-
 .../0.12.0.2.0/package/scripts/params_linux.py  |   4 +-
 .../4.0.0.2.0/package/scripts/params_linux.py   |   2 +
 .../RANGER/0.4.0/package/scripts/params.py      |   5 +-
 .../0.5.0.2.3/package/scripts/params.py         |   5 +-
 .../SPARK/1.2.1/package/scripts/params.py       |   5 +
 .../SPARK2/2.0.0/package/scripts/params.py      |   3 +-
 .../TITAN/1.0.0/package/scripts/params.py       |   3 +-
 .../2.1.0.2.0/package/scripts/params_linux.py   |   4 +-
 .../0.6.0.2.5/package/scripts/params.py         |   3 +-
 .../2.0.6/hooks/after-INSTALL/scripts/params.py |   3 +-
 .../2.0.6/hooks/before-ANY/scripts/params.py    |   4 +
 .../before-START/scripts/custom_extensions.py   | 174 +++++++++++++++++++
 .../2.0.6/hooks/before-START/scripts/hook.py    |   3 +
 .../2.0.6/hooks/before-START/scripts/params.py  |  12 +-
 .../HDP/2.0.6/properties/stack_features.json    |   5 +
 .../services/HBASE/configuration/hbase-env.xml  |  87 ++++++++++
 .../services/HBASE/configuration/hbase-site.xml |   8 +
 .../services/HDFS/configuration/core-site.xml   |  29 ++++
 .../services/HDFS/configuration/hadoop-env.xml  |   5 +
 .../services/HIVE/configuration/hive-env.xml    |   3 +-
 .../services/HIVE/configuration/hive-site.xml   |   9 +
 .../YARN/configuration-mapred/mapred-site.xml   |   9 +
 .../services/YARN/configuration/yarn-site.xml   |   2 +-
 28 files changed, 402 insertions(+), 16 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/0a78ceab/ambari-common/src/main/python/resource_management/libraries/functions/constants.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/constants.py b/ambari-common/src/main/python/resource_management/libraries/functions/constants.py
index 3b941ec..6ae71ef 100644
--- a/ambari-common/src/main/python/resource_management/libraries/functions/constants.py
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/constants.py
@@ -50,6 +50,7 @@ class StackFeature:
   FALCON_EXTENSIONS = "falcon_extensions"
   DATANODE_NON_ROOT = "datanode_non_root"
   SECURE_ZOOKEEPER = "secure_zookeeper"
+  HADOOP_CUSTOM_EXTENSIONS = "hadoop_custom_extensions"
   REMOVE_RANGER_HDFS_PLUGIN_ENV = "remove_ranger_hdfs_plugin_env"
   RANGER = "ranger"
   RANGER_TAGSYNC_COMPONENT = "ranger_tagsync_component"
@@ -119,4 +120,4 @@ class StackFeature:
   SECURE_RANGER_SSL_PASSWORD = "secure_ranger_ssl_password"
   RANGER_KMS_SSL = "ranger_kms_ssl"
   KAFKA_ACL_MIGRATION_SUPPORT = "kafka_acl_migration_support"
-  ATLAS_CORE_SITE_SUPPORT="atlas_core_site_support"
\ No newline at end of file
+  ATLAS_CORE_SITE_SUPPORT="atlas_core_site_support"

http://git-wip-us.apache.org/repos/asf/ambari/blob/0a78ceab/ambari-common/src/main/python/resource_management/libraries/functions/version.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/version.py b/ambari-common/src/main/python/resource_management/libraries/functions/version.py
index 2500430..7dfa6b8 100644
--- a/ambari-common/src/main/python/resource_management/libraries/functions/version.py
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/version.py
@@ -74,4 +74,19 @@ def compare_versions(version1, version2, format=False):
   v2 = version2 if not format else format_stack_version(version2)
 
   max_segments = max(len(v1.split(".")), len(v2.split(".")))
-  return cmp(_normalize(v1, desired_segments=max_segments), _normalize(v2, desired_segments=max_segments))
\ No newline at end of file
+  return cmp(_normalize(v1, desired_segments=max_segments), _normalize(v2, desired_segments=max_segments))
+
+def get_major_version(full_version):
+  """
+  :param full_version: Input string, e.g. "2.1.3.0" or "2.2.0.1-885" and similar
+  :return: A well-formatted HDP major stack version of the form #.# as a string, or None if
+  the major version cannot be determined
+  """
+  pattern = re.compile(r'^[0-9]+\.[0-9]+')
+  major_version = None
+
+  m = pattern.search(full_version)
+  if m:
+    major_version = m.group()
+
+  return major_version
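
A quick illustration of the new get_major_version() helper (values here are illustrative; the regular expression simply keeps the leading "#.#" of whatever version string it is given):

  from resource_management.libraries.functions.version import get_major_version

  get_major_version("2.6.0.0")        # -> "2.6"
  get_major_version("2.6.3.0-235")    # -> "2.6"
  get_major_version("not-a-version")  # -> None (no leading numeric segments)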

http://git-wip-us.apache.org/repos/asf/ambari/blob/0a78ceab/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params_linux.py
index c2111b4..0ca1f0b 100644
--- a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params_linux.py
@@ -25,6 +25,7 @@ from resource_management.core.shell import as_user
 from ambari_commons import OSCheck
 from ambari_commons.constants import AMBARI_SUDO_BINARY
 from resource_management.libraries.functions.expect import expect
+from resource_management.libraries.functions.version import format_stack_version, get_major_version
 
 config = Script.get_config()
 
@@ -36,6 +37,11 @@ rpm_version = default("/configurations/hadoop-env/rpm_version", None)
 
 ams_grafana_pid_dir = config['configurations']['ams-grafana-env']['metrics_grafana_pid_dir']
 
+stack_version_unformatted = config['hostLevelParams']['stack_version']
+stack_version_formatted = format_stack_version(stack_version_unformatted)
+
+major_stack_version = get_major_version(stack_version_formatted)
+
 #hadoop params
 if rpm_version is not None:
   #RPM versioning support

http://git-wip-us.apache.org/repos/asf/ambari/blob/0a78ceab/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py
index 6617a80..fa9bef2 100644
--- a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py
@@ -45,6 +45,7 @@ from resource_management.libraries.functions.expect import expect
 from ambari_commons.ambari_metrics_helper import select_metric_collector_hosts_from_hostnames
 from resource_management.libraries.functions.setup_ranger_plugin_xml import get_audit_configs, generate_ranger_service_config
 from resource_management.libraries.functions.constants import Direction
+from resource_management.libraries.functions.version import get_major_version
 
 # server configurations
 config = Script.get_config()
@@ -60,6 +61,7 @@ etc_prefix_dir = "/etc/hbase"
 
 stack_version_unformatted = status_params.stack_version_unformatted
 stack_version_formatted = status_params.stack_version_formatted
+major_stack_version = get_major_version(stack_version_formatted)
 stack_root = status_params.stack_root
 
 # get the correct version to use for checking stack features

http://git-wip-us.apache.org/repos/asf/ambari/blob/0a78ceab/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py
index 1d19175..20e86f7 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py
@@ -31,7 +31,7 @@ from resource_management.libraries.functions import StackFeature
 from resource_management.libraries.functions.stack_features import check_stack_feature
 from resource_management.libraries.functions.stack_features import get_stack_feature_version
 from resource_management.libraries.functions import format
-from resource_management.libraries.functions.version import format_stack_version
+from resource_management.libraries.functions.version import format_stack_version, get_major_version
 from resource_management.libraries.functions.default import default
 from resource_management.libraries.functions.expect import expect
 from resource_management.libraries.functions import get_klist_path
@@ -56,6 +56,7 @@ stack_root = Script.get_stack_root()
 upgrade_direction = default("/commandParams/upgrade_direction", None)
 stack_version_unformatted = config['hostLevelParams']['stack_version']
 stack_version_formatted = format_stack_version(stack_version_unformatted)
+major_stack_version = get_major_version(stack_version_formatted)
 agent_stack_retry_on_unavailability = config['hostLevelParams']['agent_stack_retry_on_unavailability']
 agent_stack_retry_count = expect("/hostLevelParams/agent_stack_retry_count", int)
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/0a78ceab/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
index 65ba140..d46b6ce 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
@@ -45,6 +45,7 @@ from resource_management.libraries.functions.setup_atlas_hook import has_atlas_i
 from ambari_commons.ambari_metrics_helper import select_metric_collector_hosts_from_hostnames
 from resource_management.libraries.functions.setup_ranger_plugin_xml import get_audit_configs, generate_ranger_service_config
 from resource_management.libraries.functions.get_architecture import get_architecture
+from resource_management.libraries.functions.version import get_major_version
 
 from resource_management.core.utils import PasswordString
 from resource_management.core.shell import checked_call
@@ -81,6 +82,7 @@ stack_version_formatted_major = status_params.stack_version_formatted_major
 
 # this is not available on INSTALL action because <stack-selector-tool> is not available
 stack_version_formatted = functions.get_stack_version('hive-server2')
+major_stack_version = get_major_version(stack_version_formatted_major)
 
 # New Cluster Stack Version that is defined during the RESTART of a Rolling Upgrade.
 # It cannot be used during the initial Cluser Install because the version is not yet known.
@@ -849,4 +851,4 @@ if security_enabled:
 
 # For ldap - hive_check
 hive_ldap_user= config['configurations']['hive-env'].get('alert_ldap_username','')
-hive_ldap_passwd=config['configurations']['hive-env'].get('alert_ldap_password','')
\ No newline at end of file
+hive_ldap_passwd=config['configurations']['hive-env'].get('alert_ldap_password','')

http://git-wip-us.apache.org/repos/asf/ambari/blob/0a78ceab/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params_linux.py
index 110d55a..2f474a1 100644
--- a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params_linux.py
@@ -35,6 +35,7 @@ from resource_management.libraries.functions.expect import expect
 from resource_management.libraries.functions.get_architecture import get_architecture
 from resource_management.libraries.functions.stack_features import get_stack_feature_version
 from resource_management.libraries.functions.stack_tools import get_stack_name
+from resource_management.libraries.functions.version import get_major_version
 
 from resource_management.core.utils import PasswordString
 from ambari_commons.credential_store_helper import get_password_from_credential_store
@@ -74,6 +75,7 @@ source_stack_name = get_stack_name(source_stack)
 
 stack_version_unformatted =  status_params.stack_version_unformatted
 stack_version_formatted =  status_params.stack_version_formatted
+major_stack_version = get_major_version(stack_version_formatted)
 version_for_stack_feature_checks = get_stack_feature_version(config)
 
 hadoop_conf_dir = conf_select.get_hadoop_conf_dir()

http://git-wip-us.apache.org/repos/asf/ambari/blob/0a78ceab/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/params.py
index 3789358..65ee1a7 100644
--- a/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/params.py
@@ -20,7 +20,7 @@ limitations under the License.
 
 import os
 from resource_management.libraries.script import Script
-from resource_management.libraries.functions.version import format_stack_version
+from resource_management.libraries.functions.version import format_stack_version, get_major_version
 from resource_management.libraries.functions.format import format
 from resource_management.libraries.functions.default import default
 from resource_management.libraries.functions.is_empty import is_empty
@@ -49,6 +49,7 @@ version = default("/commandParams/version", None)
 
 stack_version_unformatted = config['hostLevelParams']['stack_version']
 stack_version_formatted = format_stack_version(stack_version_unformatted)
+major_stack_version = get_major_version(stack_version_formatted)
 
 upgrade_marker_file = format("{tmp_dir}/rangeradmin_ru.inprogress")
 
@@ -447,4 +448,4 @@ truststore_password = config['configurations']['ranger-admin-site']['ranger.trus
 # need this to capture cluster name for ranger tagsync
 cluster_name = config['clusterName']
 ranger_ldap_bind_auth_password = config['configurations']['ranger-admin-site']['ranger.ldap.bind.password']
-ranger_ad_bind_auth_password = config['configurations']['ranger-admin-site']['ranger.ldap.ad.bind.password']
\ No newline at end of file
+ranger_ad_bind_auth_password = config['configurations']['ranger-admin-site']['ranger.ldap.ad.bind.password']

http://git-wip-us.apache.org/repos/asf/ambari/blob/0a78ceab/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/params.py b/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/params.py
index 633e930..93c7b88 100755
--- a/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/params.py
@@ -20,7 +20,7 @@ limitations under the License.
 import os
 from resource_management.libraries.functions import conf_select
 from resource_management.libraries.script import Script
-from resource_management.libraries.functions.version import format_stack_version
+from resource_management.libraries.functions.version import format_stack_version, get_major_version
 from resource_management.libraries.functions.format import format
 from resource_management.libraries.functions.default import default
 from resource_management.libraries.functions.stack_features import check_stack_feature
@@ -43,6 +43,7 @@ upgrade_direction = default("/commandParams/upgrade_direction", None)
 
 stack_version_unformatted = config['hostLevelParams']['stack_version']
 stack_version_formatted = format_stack_version(stack_version_unformatted)
+major_stack_version = get_major_version(stack_version_formatted)
 
 # get the correct version to use for checking stack features
 version_for_stack_feature_checks = get_stack_feature_version(config)
@@ -327,4 +328,4 @@ HdfsResource = functools.partial(
 )
 
 local_component_list = default("/localComponents", [])
-has_hdfs_client_on_node = 'HDFS_CLIENT' in local_component_list
\ No newline at end of file
+has_hdfs_client_on_node = 'HDFS_CLIENT' in local_component_list

http://git-wip-us.apache.org/repos/asf/ambari/blob/0a78ceab/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/params.py b/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/params.py
index 5dbbaed..9b813a1 100644
--- a/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/params.py
@@ -36,6 +36,7 @@ from resource_management.libraries.functions.version import format_stack_version
 from resource_management.libraries.functions.default import default
 from resource_management.libraries.functions import get_kinit_path
 from resource_management.libraries.functions.get_not_managed_resources import get_not_managed_resources
+from resource_management.libraries.functions.version import format_stack_version, get_major_version
 
 from resource_management.libraries.script.script import Script
 
@@ -54,6 +55,10 @@ component_directory = Script.get_component_from_role(SERVER_ROLE_DIRECTORY_MAP,
 config = Script.get_config()
 tmp_dir = Script.get_tmp_dir()
 
+stack_version_unformatted = config['hostLevelParams']['stack_version']
+stack_version_formatted = format_stack_version(stack_version_unformatted)
+major_stack_version = get_major_version(stack_version_formatted)
+
 upgrade_direction = default("/commandParams/upgrade_direction", None)
 java_home = config['hostLevelParams']['java_home']
 stack_name = status_params.stack_name

http://git-wip-us.apache.org/repos/asf/ambari/blob/0a78ceab/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/params.py
index efbec0f..1968b0e 100755
--- a/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/params.py
@@ -30,7 +30,7 @@ from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions import format
 from resource_management.libraries.functions.get_stack_version import get_stack_version
-from resource_management.libraries.functions.version import format_stack_version
+from resource_management.libraries.functions.version import format_stack_version, get_major_version
 from resource_management.libraries.functions.default import default
 from resource_management.libraries.functions import get_kinit_path
 from resource_management.libraries.functions.get_not_managed_resources import get_not_managed_resources
@@ -57,6 +57,7 @@ stack_name = status_params.stack_name
 stack_root = Script.get_stack_root()
 stack_version_unformatted = config['hostLevelParams']['stack_version']
 stack_version_formatted = format_stack_version(stack_version_unformatted)
+major_stack_version = get_major_version(stack_version_formatted)
 
 sysprep_skip_copy_tarballs_hdfs = get_sysprep_skip_copy_tarballs_hdfs()
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/0a78ceab/ambari-server/src/main/resources/common-services/TITAN/1.0.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/TITAN/1.0.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/TITAN/1.0.0/package/scripts/params.py
index 8019748..896b512 100755
--- a/ambari-server/src/main/resources/common-services/TITAN/1.0.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/TITAN/1.0.0/package/scripts/params.py
@@ -24,7 +24,7 @@ from resource_management.libraries.resources.hdfs_resource import HdfsResource
 from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.format import format
-from resource_management.libraries.functions.version import format_stack_version
+from resource_management.libraries.functions.version import format_stack_version, get_major_version
 from resource_management.libraries.functions.default import default
 from resource_management.libraries.functions import get_kinit_path
 from resource_management.libraries.functions.get_stack_version import get_stack_version
@@ -37,6 +37,7 @@ stack_name = default("/hostLevelParams/stack_name", None)
 
 stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
 stack_version_formatted = format_stack_version(stack_version_unformatted)
+major_stack_version = get_major_version(stack_version_formatted)
 full_stack_version = get_stack_version('titan-client')
 
 # New Cluster Stack Version that is defined during the RESTART of a Rolling Upgrade

http://git-wip-us.apache.org/repos/asf/ambari/blob/0a78ceab/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py
index a4f7ee9..fad1fcb 100644
--- a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py
@@ -30,7 +30,7 @@ from resource_management.libraries.functions.stack_features import check_stack_f
 from resource_management.libraries.functions.stack_features import get_stack_feature_version
 from resource_management.libraries.functions import get_kinit_path
 from resource_management.libraries.functions.get_not_managed_resources import get_not_managed_resources
-from resource_management.libraries.functions.version import format_stack_version
+from resource_management.libraries.functions.version import format_stack_version, get_major_version
 from resource_management.libraries.functions.default import default
 from resource_management.libraries import functions
 from resource_management.libraries.functions import is_empty
@@ -71,7 +71,9 @@ version_for_stack_feature_checks = get_stack_feature_version(config)
 
 # This is expected to be of the form #.#.#.#
 stack_version_unformatted = config['hostLevelParams']['stack_version']
+stack_version_formatted_major = format_stack_version(stack_version_unformatted)
 stack_version_formatted = functions.get_stack_version('hadoop-yarn-resourcemanager')
+major_stack_version = get_major_version(stack_version_formatted_major)
 
 stack_supports_ru = check_stack_feature(StackFeature.ROLLING_UPGRADE, version_for_stack_feature_checks)
 stack_supports_timeline_state_store = check_stack_feature(StackFeature.TIMELINE_STATE_STORE, version_for_stack_feature_checks)

http://git-wip-us.apache.org/repos/asf/ambari/blob/0a78ceab/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/scripts/params.py b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/scripts/params.py
index 3e6825b..0ac6dd4 100644
--- a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/scripts/params.py
@@ -29,7 +29,7 @@ from resource_management.libraries.functions.default import default
 from resource_management.libraries.functions.format import format
 from resource_management.libraries.functions.get_stack_version import get_stack_version
 from resource_management.libraries.functions.stack_features import check_stack_feature
-from resource_management.libraries.functions.version import format_stack_version
+from resource_management.libraries.functions.version import format_stack_version, get_major_version
 from resource_management.libraries.resources.hdfs_resource import HdfsResource
 from resource_management.libraries.script.script import Script
 
@@ -200,6 +200,7 @@ stack_version_unformatted = config['hostLevelParams']['stack_version']
 
 # e.g. 2.3.0.0
 stack_version_formatted = format_stack_version(stack_version_unformatted)
+major_stack_version = get_major_version(stack_version_formatted)
 
 # e.g. 2.3.0.0-2130
 full_stack_version = default("/commandParams/version", None)

http://git-wip-us.apache.org/repos/asf/ambari/blob/0a78ceab/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/params.py
index 1782298..65c687d 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/params.py
@@ -26,7 +26,7 @@ from resource_management.libraries.functions import default
 from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions import format_jvm_option
-from resource_management.libraries.functions.version import format_stack_version
+from resource_management.libraries.functions.version import format_stack_version, get_major_version
 
 config = Script.get_config()
 tmp_dir = Script.get_tmp_dir()
@@ -40,6 +40,7 @@ sudo = AMBARI_SUDO_BINARY
 
 stack_version_unformatted = config['hostLevelParams']['stack_version']
 stack_version_formatted = format_stack_version(stack_version_unformatted)
+major_stack_version = get_major_version(stack_version_formatted)
 
 # current host stack version
 current_version = default("/hostLevelParams/current_version", None)

http://git-wip-us.apache.org/repos/asf/ambari/blob/0a78ceab/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py
index 4052d1d..1fd7f3e 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py
@@ -43,6 +43,8 @@ from ambari_commons.constants import AMBARI_SUDO_BINARY
 config = Script.get_config()
 tmp_dir = Script.get_tmp_dir()
 
+stack_root = Script.get_stack_root()
+
 architecture = get_architecture()
 
 dfs_type = default("/commandParams/dfs_type", "")
@@ -53,6 +55,8 @@ java_home = config['hostLevelParams']['java_home']
 java_version = expect("/hostLevelParams/java_version", int)
 jdk_location = config['hostLevelParams']['jdk_location']
 
+hadoop_custom_extensions_enabled = default("/configurations/core-site/hadoop.custom-extensions.enabled", False)
+
 sudo = AMBARI_SUDO_BINARY
 
 ambari_server_hostname = config['clusterHostInfo']['ambari_server_host'][0]

http://git-wip-us.apache.org/repos/asf/ambari/blob/0a78ceab/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/custom_extensions.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/custom_extensions.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/custom_extensions.py
new file mode 100644
index 0000000..63f6f70
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/custom_extensions.py
@@ -0,0 +1,174 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+import os
+
+from resource_management.core.resources import Directory
+from resource_management.core.resources import Execute
+from resource_management.libraries.functions import default
+from resource_management.libraries.script.script import Script
+from resource_management.libraries.functions import format
+
+
+DEFAULT_HADOOP_HDFS_EXTENSION_DIR = "/hdp/ext/{0}/hadoop"
+DEFAULT_HADOOP_HIVE_EXTENSION_DIR = "/hdp/ext/{0}/hive"
+DEFAULT_HADOOP_HBASE_EXTENSION_DIR = "/hdp/ext/{0}/hbase"
+
+def setup_extensions():
+  """
+  The goal of this method is to distribute extensions (for example, jar files) from
+  HDFS (/hdp/ext/{major_stack_version}/{service_name}) to all nodes that host related
+  components of a service (YARN, HIVE or HBASE). Extensions must be added to HDFS by
+  the user manually.
+  """
+
+  import params
+
+  # Hadoop Custom extensions
+  hadoop_custom_extensions_enabled = default("/configurations/core-site/hadoop.custom-extensions.enabled", False)
+  hadoop_custom_extensions_services = default("/configurations/core-site/hadoop.custom-extensions.services", "")
+  hadoop_custom_extensions_owner = default("/configurations/core-site/hadoop.custom-extensions.owner", params.hdfs_user)
+  hadoop_custom_extensions_hdfs_dir = get_config_formatted_value(default("/configurations/core-site/hadoop.custom-extensions.root",
+                                                 DEFAULT_HADOOP_HDFS_EXTENSION_DIR.format(params.major_stack_version)))
+  hadoop_custom_extensions_services = [ service.strip().upper() for service in hadoop_custom_extensions_services.split(",") ]
+  hadoop_custom_extensions_services.append("YARN")
+
+  hadoop_custom_extensions_local_dir = "{0}/current/ext/hadoop".format(Script.get_stack_root())
+
+  if params.current_service in hadoop_custom_extensions_services:
+    clean_extensions(hadoop_custom_extensions_local_dir)
+    if hadoop_custom_extensions_enabled:
+      download_extensions(hadoop_custom_extensions_owner, params.user_group,
+                          hadoop_custom_extensions_hdfs_dir,
+                          hadoop_custom_extensions_local_dir)
+
+  setup_extensions_hive()
+
+  hbase_custom_extensions_services = []
+  hbase_custom_extensions_services.append("HBASE")
+  if params.current_service in hbase_custom_extensions_services:
+    setup_hbase_extensions()
+
+
+def setup_hbase_extensions():
+  import params
+
+  # HBase Custom extensions
+  hbase_custom_extensions_enabled = default("/configurations/hbase-site/hbase.custom-extensions.enabled", False)
+  hbase_custom_extensions_owner = default("/configurations/hbase-site/hbase.custom-extensions.owner", params.hdfs_user)
+  hbase_custom_extensions_hdfs_dir = get_config_formatted_value(default("/configurations/hbase-site/hbase.custom-extensions.root",
+                                                DEFAULT_HADOOP_HBASE_EXTENSION_DIR.format(params.major_stack_version)))
+  hbase_custom_extensions_local_dir = "{0}/current/ext/hbase".format(Script.get_stack_root())
+
+  impacted_components = ['HBASE_MASTER', 'HBASE_REGIONSERVER', 'PHOENIX_QUERY_SERVER']
+  role = params.config.get('role','')
+
+  if role in impacted_components:
+    clean_extensions(hbase_custom_extensions_local_dir)
+    if hbase_custom_extensions_enabled:
+      download_extensions(hbase_custom_extensions_owner, params.user_group,
+                          hbase_custom_extensions_hdfs_dir,
+                          hbase_custom_extensions_local_dir)
+
+
+def setup_extensions_hive():
+  import params
+
+  hive_custom_extensions_enabled = default("/configurations/hive-site/hive.custom-extensions.enabled", False)
+  hive_custom_extensions_owner = default("/configurations/hive-site/hive.custom-extensions.owner", params.hdfs_user)
+  hive_custom_extensions_hdfs_dir = get_config_formatted_value(default("/configurations/hive-site/hive.custom-extensions.root",
+                                               DEFAULT_HADOOP_HIVE_EXTENSION_DIR.format(params.major_stack_version)))
+
+  hive_custom_extensions_local_dir = "{0}/current/ext/hive".format(Script.get_stack_root())
+
+  impacted_components = ['HIVE_SERVER', 'HIVE_CLIENT']
+  role = params.config.get('role','')
+
+  # Run copying for HIVE_SERVER and HIVE_CLIENT
+  if params.current_service == 'HIVE' and role in impacted_components:
+    clean_extensions(hive_custom_extensions_local_dir)
+    if hive_custom_extensions_enabled:
+      download_extensions(hive_custom_extensions_owner, params.user_group,
+                          hive_custom_extensions_hdfs_dir,
+                          hive_custom_extensions_local_dir)
+
+def download_extensions(owner_user, owner_group, hdfs_source_dir, local_target_dir):
+  """
+  :param owner_user: user owner of the HDFS directory
+  :param owner_group: group owner of the HDFS directory
+  :param hdfs_source_dir: the HDFS directory the files are pulled from
+  :param local_target_dir: the local directory the files are downloaded to
+  :return: True once the download completes; failures in the underlying resources raise exceptions
+  """
+  import params
+
+  if not os.path.isdir(local_target_dir):
+    extensions_tmp_dir=format("{tmp_dir}/custom_extensions")
+    Directory(local_target_dir,
+              owner="root",
+              mode=0755,
+              group="root",
+              create_parents=True)
+
+    params.HdfsResource(hdfs_source_dir,
+                        type="directory",
+                        action="create_on_execute",
+                        owner=owner_user,
+                        group=owner_group,
+                        mode=0755)
+
+    Directory(extensions_tmp_dir,
+              owner=params.hdfs_user,
+              mode=0755,
+              create_parents=True)
+
+    # copy from hdfs to /tmp
+    params.HdfsResource(extensions_tmp_dir,
+                        type="directory",
+                        action="download_on_execute",
+                        source=hdfs_source_dir,
+                        user=params.hdfs_user,
+                        mode=0644,
+                        replace_existing_files=True)
+
+    # Execute command is not quoting correctly.
+    cmd = format("{sudo} mv {extensions_tmp_dir}/* {local_target_dir}")
+    only_if_cmd = "ls -d {extensions_tmp_dir}/*".format(extensions_tmp_dir=extensions_tmp_dir)
+    Execute(cmd, only_if=only_if_cmd)
+
+    only_if_local = 'ls -d "{local_target_dir}"'.format(local_target_dir=local_target_dir)
+    Execute(("chown", "-R", "root:root", local_target_dir),
+            sudo=True,
+            only_if=only_if_local)
+
+    params.HdfsResource(None,action="execute")
+  return True
+
+def clean_extensions(local_dir):
+  """
+  :param local_dir: The local directory where the extensions are stored.
+  :return: True once the directory has been removed (or if it did not exist)
+  """
+  if os.path.isdir(local_dir):
+    Directory(local_dir,
+              action="delete")
+  return True
+
+def get_config_formatted_value(property_value):
+  return format(property_value.replace("{{", "{").replace("}}", "}"))
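
The *.custom-extensions.root defaults shipped below use doubled braces ({{major_stack_version}}); get_config_formatted_value() rewrites them to single braces and hands the result to resource_management's format(), which resolves the name from the calling scope. A hypothetical stand-in that spells the substitution out explicitly (the real helper does not take the version as an argument):

  def resolve_extension_root(property_value, major_stack_version):
      # "/hdp/ext/{{major_stack_version}}/hadoop" -> "/hdp/ext/{major_stack_version}/hadoop"
      template = property_value.replace("{{", "{").replace("}}", "}")
      # str.format stands in for resource_management's format(), which pulls
      # major_stack_version from params in the real code path
      return template.format(major_stack_version=major_stack_version)

  resolve_extension_root("/hdp/ext/{{major_stack_version}}/hadoop", "2.6")  # -> "/hdp/ext/2.6/hadoop"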

http://git-wip-us.apache.org/repos/asf/ambari/blob/0a78ceab/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/hook.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/hook.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/hook.py
index f7705c4..4cb276a 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/hook.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/hook.py
@@ -21,6 +21,7 @@ import sys
 from resource_management import *
 from rack_awareness import create_topology_script_and_mapping
 from shared_initialization import setup_hadoop, setup_configs, create_javahome_symlink, setup_unlimited_key_jce_policy
+from custom_extensions import setup_extensions
 
 class BeforeStartHook(Hook):
 
@@ -35,6 +36,8 @@ class BeforeStartHook(Hook):
     create_javahome_symlink()
     create_topology_script_and_mapping()
     setup_unlimited_key_jce_policy()
+    if params.stack_supports_hadoop_custom_extensions:
+      setup_extensions()
 
 if __name__ == "__main__":
   BeforeStartHook().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/0a78ceab/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py
index 2e7f5cd..c2714e2 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py
@@ -24,17 +24,26 @@ from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions import default
 from resource_management.libraries.functions import format_jvm_option
 from resource_management.libraries.functions import format
-from resource_management.libraries.functions.version import format_stack_version, compare_versions
+from resource_management.libraries.functions.version import format_stack_version, compare_versions, get_major_version
 from ambari_commons.os_check import OSCheck
 from resource_management.libraries.script.script import Script
 from resource_management.libraries.functions import get_kinit_path
 from resource_management.libraries.functions.get_not_managed_resources import get_not_managed_resources
 from resource_management.libraries.resources.hdfs_resource import HdfsResource
+from resource_management.libraries.functions.stack_features import check_stack_feature
+from resource_management.libraries.functions.stack_features import get_stack_feature_version
+from resource_management.libraries.functions import StackFeature
+from ambari_commons.constants import AMBARI_SUDO_BINARY
 
 config = Script.get_config()
 tmp_dir = Script.get_tmp_dir()
 artifact_dir = tmp_dir + "/AMBARI-artifacts"
 
+version_for_stack_feature_checks = get_stack_feature_version(config)
+stack_supports_hadoop_custom_extensions = check_stack_feature(StackFeature.HADOOP_CUSTOM_EXTENSIONS, version_for_stack_feature_checks)
+
+sudo = AMBARI_SUDO_BINARY
+
 # Global flag enabling or disabling the sysprep feature
 host_sys_prepped = default("/hostLevelParams/host_sys_prepped", False)
 
@@ -47,6 +56,7 @@ sysprep_skip_setup_jce = host_sys_prepped and default("/configurations/cluster-e
 
 stack_version_unformatted = config['hostLevelParams']['stack_version']
 stack_version_formatted = format_stack_version(stack_version_unformatted)
+major_stack_version = get_major_version(stack_version_formatted)
 
 dfs_type = default("/commandParams/dfs_type", "")
 hadoop_conf_dir = "/etc/hadoop/conf"

http://git-wip-us.apache.org/repos/asf/ambari/blob/0a78ceab/ambari-server/src/main/resources/stacks/HDP/2.0.6/properties/stack_features.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/properties/stack_features.json b/ambari-server/src/main/resources/stacks/HDP/2.0.6/properties/stack_features.json
index f2c9afc..973d9dd 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/properties/stack_features.json
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/properties/stack_features.json
@@ -428,6 +428,11 @@
         "name": "atlas_core_site_support",
         "description": "Need to create core-site under Atlas conf directory.",
         "min_version": "2.6.0.0"
+      },
+      {
+        "name": "hadoop_custom_extensions",
+        "description": "Support hadoop custom extensions",
+        "min_version": "2.6.0.0"
       }
     ]
   }
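
This stack_features.json entry is what gates the new hook logic; a sketch mirroring the before-START params.py change above shows how it is consumed:

  from resource_management.libraries.script.script import Script
  from resource_management.libraries.functions import StackFeature
  from resource_management.libraries.functions.stack_features import check_stack_feature, get_stack_feature_version

  config = Script.get_config()
  version_for_stack_feature_checks = get_stack_feature_version(config)
  # True only when the running stack satisfies min_version (HDP >= 2.6.0.0 here),
  # which is what hook.py checks before calling setup_extensions()
  stack_supports_hadoop_custom_extensions = check_stack_feature(
      StackFeature.HADOOP_CUSTOM_EXTENSIONS, version_for_stack_feature_checks)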

http://git-wip-us.apache.org/repos/asf/ambari/blob/0a78ceab/ambari-server/src/main/resources/stacks/HDP/2.6/services/HBASE/configuration/hbase-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.6/services/HBASE/configuration/hbase-env.xml b/ambari-server/src/main/resources/stacks/HDP/2.6/services/HBASE/configuration/hbase-env.xml
index 9afaecb..7732ed7 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.6/services/HBASE/configuration/hbase-env.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.6/services/HBASE/configuration/hbase-env.xml
@@ -34,4 +34,91 @@ and the -Xmn ratio (hbase_regionserver_xmn_ratio) exceeds this value.
     </value-attributes>
     <on-ambari-upgrade add="false"/>
   </property>
+  <property>
+    <name>content</name>
+    <display-name>hbase-env template</display-name>
+    <description>This is the jinja template for hbase-env.sh file</description>
+    <value>
+# Set environment variables here.
+
+# The java implementation to use. Java 1.6 required.
+export JAVA_HOME={{java64_home}}
+
+# HBase Configuration directory
+export HBASE_CONF_DIR=${HBASE_CONF_DIR:-{{hbase_conf_dir}}}
+
+# Extra Java CLASSPATH elements. Optional.
+export HBASE_CLASSPATH=${HBASE_CLASSPATH}:{{stack_root}}/current/ext/hbase/*
+
+
+# The maximum amount of heap to use, in MB. Default is 1000.
+# export HBASE_HEAPSIZE=1000
+
+# Extra Java runtime options.
+# Below are what we set by default. May only work with SUN JVM.
+# For more on why as well as other possible settings,
+# see http://wiki.apache.org/hadoop/PerformanceTuning
+export SERVER_GC_OPTS="-verbose:gc -XX:+PrintGCDetails -XX:+PrintGCDateStamps -Xloggc:{{log_dir}}/gc.log-`date +'%Y%m%d%H%M'`"
+# Uncomment below to enable java garbage collection logging.
+# export HBASE_OPTS="$HBASE_OPTS -verbose:gc -XX:+PrintGCDetails -XX:+PrintGCDateStamps -Xloggc:$HBASE_HOME/logs/gc-hbase.log"
+
+# Uncomment and adjust to enable JMX exporting
+# See jmxremote.password and jmxremote.access in $JRE_HOME/lib/management to configure remote password access.
+# More details at: http://java.sun.com/javase/6/docs/technotes/guides/management/agent.html
+#
+# export HBASE_JMX_BASE="-Dcom.sun.management.jmxremote.ssl=false -Dcom.sun.management.jmxremote.authenticate=false"
+# If you want to configure BucketCache, specify '-XX: MaxDirectMemorySize=' with proper direct memory size
+# export HBASE_THRIFT_OPTS="$HBASE_JMX_BASE -Dcom.sun.management.jmxremote.port=10103"
+# export HBASE_ZOOKEEPER_OPTS="$HBASE_JMX_BASE -Dcom.sun.management.jmxremote.port=10104"
+
+# File naming hosts on which HRegionServers will run. $HBASE_HOME/conf/regionservers by default.
+export HBASE_REGIONSERVERS=${HBASE_CONF_DIR}/regionservers
+
+# Extra ssh options. Empty by default.
+# export HBASE_SSH_OPTS="-o ConnectTimeout=1 -o SendEnv=HBASE_CONF_DIR"
+
+# Where log files are stored. $HBASE_HOME/logs by default.
+export HBASE_LOG_DIR={{log_dir}}
+
+# A string representing this instance of hbase. $USER by default.
+# export HBASE_IDENT_STRING=$USER
+
+# The scheduling priority for daemon processes. See 'man nice'.
+# export HBASE_NICENESS=10
+
+# The directory where pid files are stored. /tmp by default.
+export HBASE_PID_DIR={{pid_dir}}
+
+# Seconds to sleep between slave commands. Unset by default. This
+# can be useful in large clusters, where, e.g., slave rsyncs can
+# otherwise arrive faster than the master can service them.
+# export HBASE_SLAVE_SLEEP=0.1
+
+# Tell HBase whether it should manage its own instance of Zookeeper or not.
+export HBASE_MANAGES_ZK=false
+
+{% if java_version &lt; 8 %}
+JDK_DEPENDED_OPTS="-XX:PermSize=128m -XX:MaxPermSize=128m"
+{% endif %}
+
+{% if security_enabled %}
+export HBASE_OPTS="$HBASE_OPTS -XX:+UseConcMarkSweepGC -XX:ErrorFile={{log_dir}}/hs_err_pid%p.log -Djava.security.auth.login.config={{client_jaas_config_file}} -Djava.io.tmpdir={{java_io_tmpdir}}"
+export HBASE_MASTER_OPTS="$HBASE_MASTER_OPTS -Xmx{{master_heapsize}} -Djava.security.auth.login.config={{master_jaas_config_file}} -Djavax.security.auth.useSubjectCredsOnly=false $JDK_DEPENDED_OPTS"
+export HBASE_REGIONSERVER_OPTS="$HBASE_REGIONSERVER_OPTS -Xmn{{regionserver_xmn_size}} -XX:CMSInitiatingOccupancyFraction=70  -Xms{{regionserver_heapsize}} -Xmx{{regionserver_heapsize}} -Djava.security.auth.login.config={{regionserver_jaas_config_file}} -Djavax.security.auth.useSubjectCredsOnly=false $JDK_DEPENDED_OPTS"
+export PHOENIX_QUERYSERVER_OPTS="$PHOENIX_QUERYSERVER_OPTS -Djava.security.auth.login.config={{queryserver_jaas_config_file}}"
+{% else %}
+export HBASE_OPTS="$HBASE_OPTS -XX:+UseConcMarkSweepGC -XX:ErrorFile={{log_dir}}/hs_err_pid%p.log -Djava.io.tmpdir={{java_io_tmpdir}}"
+export HBASE_MASTER_OPTS="$HBASE_MASTER_OPTS -Xmx{{master_heapsize}} $JDK_DEPENDED_OPTS"
+export HBASE_REGIONSERVER_OPTS="$HBASE_REGIONSERVER_OPTS -Xmn{{regionserver_xmn_size}} -XX:CMSInitiatingOccupancyFraction=70  -Xms{{regionserver_heapsize}} -Xmx{{regionserver_heapsize}} $JDK_DEPENDED_OPTS"
+{% endif %}
+
+# HBase off-heap MaxDirectMemorySize
+export HBASE_REGIONSERVER_OPTS="$HBASE_REGIONSERVER_OPTS {% if hbase_max_direct_memory_size %} -XX:MaxDirectMemorySize={{hbase_max_direct_memory_size}}m {% endif %}"
+export HBASE_MASTER_OPTS="$HBASE_MASTER_OPTS {% if hbase_max_direct_memory_size %} -XX:MaxDirectMemorySize={{hbase_max_direct_memory_size}}m {% endif %}"
+    </value>
+    <value-attributes>
+      <type>content</type>
+    </value-attributes>
+    <on-ambari-upgrade add="false"/>
+  </property>
 </configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/0a78ceab/ambari-server/src/main/resources/stacks/HDP/2.6/services/HBASE/configuration/hbase-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.6/services/HBASE/configuration/hbase-site.xml b/ambari-server/src/main/resources/stacks/HDP/2.6/services/HBASE/configuration/hbase-site.xml
index 715023b..9b69913 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.6/services/HBASE/configuration/hbase-site.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.6/services/HBASE/configuration/hbase-site.xml
@@ -40,4 +40,12 @@
     </description>
     <on-ambari-upgrade add="false"/>
   </property>
+  <property>
+    <name>hbase.custom-extensions.root</name>
+    <value>/hdp/ext/{{major_stack_version}}/hbase</value>
+    <description>
+        Root directory for hbase extensions in HDFS
+    </description>
+    <on-ambari-upgrade add="false"/>
+  </property>
 </configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/0a78ceab/ambari-server/src/main/resources/stacks/HDP/2.6/services/HDFS/configuration/core-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.6/services/HDFS/configuration/core-site.xml b/ambari-server/src/main/resources/stacks/HDP/2.6/services/HDFS/configuration/core-site.xml
new file mode 100644
index 0000000..2d01643
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.6/services/HDFS/configuration/core-site.xml
@@ -0,0 +1,29 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+    Licensed to the Apache Software Foundation (ASF) under one or more
+    contributor license agreements.  See the NOTICE file distributed with
+    this work for additional information regarding copyright ownership.
+    The ASF licenses this file to You under the Apache License, Version 2.0
+    (the "License"); you may not use this file except in compliance with
+    the License.  You may obtain a copy of the License at
+
+        http://www.apache.org/licenses/LICENSE-2.0
+
+    Unless required by applicable law or agreed to in writing, software
+    distributed under the License is distributed on an "AS IS" BASIS,
+    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+    See the License for the specific language governing permissions and
+    limitations under the License.
+ -->
+<!-- Put site-specific property overrides in this file. -->
+<configuration xmlns:xi="http://www.w3.org/2001/XInclude" supports_final="true">
+    <property>
+        <name>hadoop.custom-extensions.root</name>
+        <value>/hdp/ext/{{major_stack_version}}/hadoop</value>
+        <description>
+            Root directory for hadoop extensions in HDFS
+        </description>
+        <on-ambari-upgrade add="false"/>
+    </property>
+</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/0a78ceab/ambari-server/src/main/resources/stacks/HDP/2.6/services/HDFS/configuration/hadoop-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.6/services/HDFS/configuration/hadoop-env.xml b/ambari-server/src/main/resources/stacks/HDP/2.6/services/HDFS/configuration/hadoop-env.xml
index 04b9304..3a7edb9 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.6/services/HDFS/configuration/hadoop-env.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.6/services/HDFS/configuration/hadoop-env.xml
@@ -157,6 +157,11 @@
       ulimit -n {{hdfs_user_nofile_limit}}
       fi
 
+      {% if hadoop_custom_extensions_enabled %}
+      #Enable custom extensions
+      export HADOOP_CLASSPATH=$HADOOP_CLASSPATH:{{stack_root}}/current/ext/hadoop/*
+      {% endif %}
+
       # Enable ACLs on zookeper znodes if required
       {% if hadoop_zkfc_opts is defined %}
       export HADOOP_ZKFC_OPTS="{{hadoop_zkfc_opts}} $HADOOP_ZKFC_OPTS"

http://git-wip-us.apache.org/repos/asf/ambari/blob/0a78ceab/ambari-server/src/main/resources/stacks/HDP/2.6/services/HIVE/configuration/hive-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.6/services/HIVE/configuration/hive-env.xml b/ambari-server/src/main/resources/stacks/HDP/2.6/services/HIVE/configuration/hive-env.xml
index 929c10d..674d633 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.6/services/HIVE/configuration/hive-env.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.6/services/HIVE/configuration/hive-env.xml
@@ -94,8 +94,9 @@ export HIVE_HOME=${HIVE_HOME:-{{hive_home_dir}}}
 export HIVE_CONF_DIR=${HIVE_CONF_DIR:-{{hive_config_dir}}}
 
 # Folder containing extra libraries required for hive compilation/execution can be controlled by:
+export HIVE_AUX_JARS_PATH={{stack_root}}/current/ext/hive
 if [ "${HIVE_AUX_JARS_PATH}" != "" ]; then
-  if [ -f "${HIVE_AUX_JARS_PATH}" ]; then
+  if [ -f "${HIVE_AUX_JARS_PATH}" ] || [ -d "${HIVE_AUX_JARS_PATH}" ] ; then
     export HIVE_AUX_JARS_PATH=${HIVE_AUX_JARS_PATH}
   elif [ -d "/usr/hdp/current/hive-webhcat/share/hcatalog" ]; then
     export HIVE_AUX_JARS_PATH=/usr/hdp/current/hive-webhcat/share/hcatalog/hive-hcatalog-core.jar

http://git-wip-us.apache.org/repos/asf/ambari/blob/0a78ceab/ambari-server/src/main/resources/stacks/HDP/2.6/services/HIVE/configuration/hive-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.6/services/HIVE/configuration/hive-site.xml b/ambari-server/src/main/resources/stacks/HDP/2.6/services/HIVE/configuration/hive-site.xml
index a07c16f..c0800ed 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.6/services/HIVE/configuration/hive-site.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.6/services/HIVE/configuration/hive-site.xml
@@ -32,4 +32,13 @@ limitations under the License.
     </description>
     <on-ambari-upgrade add="false"/>
   </property>
+
+  <property>
+    <name>hive.custom-extensions.root</name>
+    <value>/hdp/ext/{{major_stack_version}}/hive</value>
+    <description>
+        Root directory for hive extensions in HDFS
+    </description>
+    <on-ambari-upgrade add="false"/>
+  </property>
 </configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/0a78ceab/ambari-server/src/main/resources/stacks/HDP/2.6/services/YARN/configuration-mapred/mapred-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.6/services/YARN/configuration-mapred/mapred-site.xml b/ambari-server/src/main/resources/stacks/HDP/2.6/services/YARN/configuration-mapred/mapred-site.xml
index f03125b..23c69a7 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.6/services/YARN/configuration-mapred/mapred-site.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.6/services/YARN/configuration-mapred/mapred-site.xml
@@ -25,4 +25,13 @@
         </description>
         <on-ambari-upgrade add="false"/>
     </property>
+    <property>
+      <name>mapreduce.application.classpath</name>
+      <value>$PWD/mr-framework/hadoop/share/hadoop/mapreduce/*:$PWD/mr-framework/hadoop/share/hadoop/mapreduce/lib/*:$PWD/mr-framework/hadoop/share/hadoop/common/*:$PWD/mr-framework/hadoop/share/hadoop/common/lib/*:$PWD/mr-framework/hadoop/share/hadoop/yarn/*:$PWD/mr-framework/hadoop/share/hadoop/yarn/lib/*:$PWD/mr-framework/hadoop/share/hadoop/hdfs/*:$PWD/mr-framework/hadoop/share/hadoop/hdfs/lib/*:$PWD/mr-framework/hadoop/share/hadoop/tools/lib/*:/usr/hdp/${hdp.version}/hadoop/lib/hadoop-lzo-0.6.0.${hdp.version}.jar:/etc/hadoop/conf/secure:/usr/hdp/current/ext/hadoop/*</value>
+      <description>
+        CLASSPATH for MR applications. A comma-separated list of CLASSPATH
+        entries.
+      </description>
+    <on-ambari-upgrade add="false"/>
+  </property>
 </configuration>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/0a78ceab/ambari-server/src/main/resources/stacks/HDP/2.6/services/YARN/configuration/yarn-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.6/services/YARN/configuration/yarn-site.xml b/ambari-server/src/main/resources/stacks/HDP/2.6/services/YARN/configuration/yarn-site.xml
index 5f8de49..9094b15 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.6/services/YARN/configuration/yarn-site.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.6/services/YARN/configuration/yarn-site.xml
@@ -19,7 +19,7 @@
 <configuration supports_final="true">
   <property>
     <name>yarn.application.classpath</name>
-    <value>/etc/hadoop/conf,/usr/hdp/current/hadoop-client/*,/usr/hdp/current/hadoop-client/lib/*,/usr/hdp/current/hadoop-hdfs-client/*,/usr/hdp/current/hadoop-hdfs-client/lib/*,/usr/hdp/current/hadoop-yarn-client/*,/usr/hdp/current/hadoop-yarn-client/lib/*</value>
+    <value>/etc/hadoop/conf,/usr/hdp/current/hadoop-client/*,/usr/hdp/current/hadoop-client/lib/*,/usr/hdp/current/hadoop-hdfs-client/*,/usr/hdp/current/hadoop-hdfs-client/lib/*,/usr/hdp/current/hadoop-yarn-client/*,/usr/hdp/current/hadoop-yarn-client/lib/*,/usr/hdp/current/ext/hadoop/*</value>
     <description>Classpath for typical applications.</description>
     <on-ambari-upgrade add="true"/>
   </property>