Posted to commits@ambari.apache.org by wu...@apache.org on 2022/10/24 03:44:10 UTC

[ambari] branch trunk updated: AMBARI-25760: Fix fail to start Namenode for BIGTOP stack after introduce bigtop-select (#3417)

This is an automated email from the ASF dual-hosted git repository.

wuzhiguo pushed a commit to branch trunk
in repository https://gitbox.apache.org/repos/asf/ambari.git


The following commit(s) were added to refs/heads/trunk by this push:
     new d03bb0cff2 AMBARI-25760: Fix fail to start Namenode for BIGTOP stack after introduce bigtop-select (#3417)
d03bb0cff2 is described below

commit d03bb0cff278c182888aab1a0a1a370a7ce17d3f
Author: Yu Hou <52...@qq.com>
AuthorDate: Mon Oct 24 11:44:01 2022 +0800

    AMBARI-25760: Fix fail to start Namenode for BIGTOP stack after introduce bigtop-select (#3417)
---
 .../libraries/functions/conf_select.py             | 15 +-----
 .../libraries/functions/get_stack_version.py       |  2 +-
 .../libraries/functions/stack_select.py            | 44 ++++++++++++---
 .../stack-hooks/after-INSTALL/scripts/params.py    | 10 ++--
 .../stack-hooks/before-ANY/scripts/params.py       | 11 +++-
 .../stack-hooks/before-START/scripts/params.py     |  6 ++-
 .../BIGTOP/3.2.0/properties/stack_features.json    |  5 --
 .../BIGTOP/3.2.0/properties/stack_packages.json    | 33 ++++++++----
 .../services/HDFS/configuration/hadoop-env.xml     | 15 +++---
 .../services/HDFS/configuration/ssl-client.xml     |  2 +-
 .../services/HDFS/package/scripts/params_linux.py  | 12 ++---
 .../3.2.0/services/HDFS/package/scripts/utils.py   |  2 -
 .../YARN/configuration-mapred/mapred-site.xml      |  4 +-
 .../3.2.0/services/YARN/configuration/yarn-env.xml |  2 +-
 .../services/YARN/configuration/yarn-site.xml      |  2 +-
 .../services/YARN/package/scripts/historyserver.py | 31 -----------
 .../YARN/package/scripts/mapred_service_check.py   |  4 +-
 .../services/YARN/package/scripts/params_linux.py  | 62 ++++------------------
 .../3.2.0/services/YARN/package/scripts/yarn.py    |  6 +--
 .../after-INSTALL/test_after_install.py            | 12 ++---
 20 files changed, 122 insertions(+), 158 deletions(-)

diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/conf_select.py b/ambari-common/src/main/python/resource_management/libraries/functions/conf_select.py
index 4474606432..b56d80f74a 100644
--- a/ambari-common/src/main/python/resource_management/libraries/functions/conf_select.py
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/conf_select.py
@@ -162,21 +162,8 @@ def get_hadoop_conf_dir():
   directory including the component's version is tried first, but if that doesn't exist,
   this will fallback to using "current".
   """
-  stack_root = Script.get_stack_root()
-  stack_version = Script.get_stack_version()
-
   hadoop_conf_dir = os.path.join(os.path.sep, "etc", "hadoop", "conf")
-  if check_stack_feature(StackFeature.CONFIG_VERSIONING, stack_version):
-    # read the desired version from the component map and use that for building the hadoop home
-    version = component_version.get_component_repository_version()
-    if version is None:
-      version = default("/commandParams/version", None)
-
-    hadoop_conf_dir = os.path.join(stack_root, str(version), "hadoop", "conf")
-    if version is None or sudo.path_isdir(hadoop_conf_dir) is False:
-      hadoop_conf_dir = os.path.join(stack_root, "current", "hadoop-client", "conf")
-
-    Logger.info("Using hadoop conf dir: {0}".format(hadoop_conf_dir))
+  Logger.info("Using hadoop conf dir: {0}".format(hadoop_conf_dir))
 
   return hadoop_conf_dir
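
[For reference, get_hadoop_conf_dir() after this hunk reduces to a constant path. A minimal standalone sketch of the resulting function (docstring trimmed; Logger import path assumed to be the one used elsewhere in resource_management):

    import os
    from resource_management.core.logger import Logger

    def get_hadoop_conf_dir():
        # versioned lookup removed: always the fixed system conf dir
        hadoop_conf_dir = os.path.join(os.path.sep, "etc", "hadoop", "conf")  # -> /etc/hadoop/conf
        Logger.info("Using hadoop conf dir: {0}".format(hadoop_conf_dir))
        return hadoop_conf_dir
]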
 
diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/get_stack_version.py b/ambari-common/src/main/python/resource_management/libraries/functions/get_stack_version.py
index 49416af969..64ea6671ad 100644
--- a/ambari-common/src/main/python/resource_management/libraries/functions/get_stack_version.py
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/get_stack_version.py
@@ -85,7 +85,7 @@ def get_stack_version(package_name):
 
   stack_version = re.sub(package_name + ' - ', '', stack_output)
   stack_version = stack_version.rstrip()
-  match = re.match('[0-9]+.[0-9]+.[0-9]+.[0-9]+(-[0-9]+)?', stack_version)
+  match = re.match('[0-9]+.[0-9]+.[0-9]+', stack_version)
 
   if match is None:
     Logger.info('Failed to get extracted version with ' + stack_selector_path)
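
[A quick illustration, not part of the commit: the relaxed pattern accepts three-part BIGTOP versions while still matching the leading components of four-part HDP-style strings. Note the dots are unescaped, so each matches any character:

    import re

    pattern = '[0-9]+.[0-9]+.[0-9]+'
    print(re.match(pattern, '3.2.0').group())        # '3.2.0'  (BIGTOP-style version)
    print(re.match(pattern, '2.3.0.0-1234').group()) # '2.3.0'  (prefix of an HDP-style version)
]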
diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/stack_select.py b/ambari-common/src/main/python/resource_management/libraries/functions/stack_select.py
index 1aab6e3fba..1eaf49ed50 100644
--- a/ambari-common/src/main/python/resource_management/libraries/functions/stack_select.py
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/stack_select.py
@@ -60,18 +60,24 @@ SERVICE_CHECK_DIRECTORY_MAP = {
 # <stack-root>/current/hadoop-client/[bin|sbin|libexec|lib]
 # <stack-root>/2.3.0.0-1234/hadoop/[bin|sbin|libexec|lib]
 HADOOP_DIR_TEMPLATE = "{0}/{1}/{2}/{3}"
+HADOOP_REAL_DIR_TEMPLATE = "{0}/{1}/{2}/{3}/{4}"
 
 # <stack-root>/current/hadoop-client
 # <stack-root>/2.3.0.0-1234/hadoop
 HADOOP_HOME_DIR_TEMPLATE = "{0}/{1}/{2}"
+HADOOP_REAL_HOME_DIR_TEMPLATE = "{0}/{1}/{2}/{3}"
+LIB_DIR = 'usr/lib'
+BIN_DIR = 'usr/bin'
 
 HADOOP_DIR_DEFAULTS = {
   "home": "/usr/lib/hadoop",
+  "hdfs_home": "/usr/lib/hadoop-hdfs",
+  "mapred_home": "/usr/lib/hadoop-mapreduce",
+  "yarn_home": "/usr/lib/hadoop-yarn",
   "libexec": "/usr/lib/hadoop/libexec",
   "sbin": "/usr/lib/hadoop/sbin",
   "bin": "/usr/bin",
-  "lib": "/usr/lib/hadoop/lib",
-  "conf": "/etc/hadoop/conf"
+  "lib": "/usr/lib/hadoop/lib"
 }
 
 PACKAGE_SCOPE_INSTALL = "INSTALL"
@@ -387,11 +393,27 @@ def get_hadoop_dir(target):
 
     # home uses a different template
     if target == "home":
-      hadoop_dir = HADOOP_HOME_DIR_TEMPLATE.format(stack_root, version, "hadoop")
+      hadoop_dir = HADOOP_REAL_HOME_DIR_TEMPLATE.format(stack_root, version, LIB_DIR, "hadoop")
       if version is None or sudo.path_isdir(hadoop_dir) is False:
         hadoop_dir = HADOOP_HOME_DIR_TEMPLATE.format(stack_root, "current", "hadoop-client")
+    elif target == "hdfs_home":
+      hadoop_dir = HADOOP_REAL_HOME_DIR_TEMPLATE.format(stack_root, version, LIB_DIR, "hadoop-hdfs")
+      if version is None or sudo.path_isdir(hadoop_dir) is False:
+        hadoop_dir = HADOOP_HOME_DIR_TEMPLATE.format(stack_root, "current", "hadoop-hdfs-client")
+    elif target == "mapred_home":
+      hadoop_dir = HADOOP_REAL_HOME_DIR_TEMPLATE.format(stack_root, version, LIB_DIR, "hadoop-mapreduce")
+      if version is None or sudo.path_isdir(hadoop_dir) is False:
+        hadoop_dir = HADOOP_HOME_DIR_TEMPLATE.format(stack_root, "current", "hadoop-mapreduce-client")
+    elif target == "yarn_home":
+      hadoop_dir = HADOOP_REAL_HOME_DIR_TEMPLATE.format(stack_root, version, LIB_DIR, "hadoop-yarn")
+      if version is None or sudo.path_isdir(hadoop_dir) is False:
+        hadoop_dir = HADOOP_HOME_DIR_TEMPLATE.format(stack_root, "current", "hadoop-yarn-client")
+    elif target == "bin":
+      hadoop_dir = HADOOP_HOME_DIR_TEMPLATE.format(stack_root, version, BIN_DIR)
+      if version is None or sudo.path_isdir(hadoop_dir) is False:
+        hadoop_dir = HADOOP_DIR_DEFAULTS[target]
     else:
-      hadoop_dir = HADOOP_DIR_TEMPLATE.format(stack_root, version, "hadoop", target)
+      hadoop_dir = HADOOP_REAL_DIR_TEMPLATE.format(stack_root, version, LIB_DIR, "hadoop", target)
       if version is None or sudo.path_isdir(hadoop_dir) is False:
         hadoop_dir = HADOOP_DIR_TEMPLATE.format(stack_root, "current", "hadoop-client", target)
 
@@ -412,9 +434,17 @@ def get_hadoop_dir_for_stack_version(target, stack_version):
 
   # home uses a different template
   if target == "home":
-    hadoop_dir = HADOOP_HOME_DIR_TEMPLATE.format(stack_root, stack_version, "hadoop")
+    hadoop_dir = HADOOP_REAL_HOME_DIR_TEMPLATE.format(stack_root, stack_version, LIB_DIR, "hadoop")
+  elif target == "hdfs_home":
+    hadoop_dir = HADOOP_REAL_HOME_DIR_TEMPLATE.format(stack_root, stack_version, LIB_DIR, "hadoop-hdfs")
+  elif target == "mapred_home":
+    hadoop_dir = HADOOP_REAL_HOME_DIR_TEMPLATE.format(stack_root, stack_version, LIB_DIR, "hadoop-mapreduce")
+  elif target == "yarn_home":
+    hadoop_dir = HADOOP_REAL_HOME_DIR_TEMPLATE.format(stack_root, stack_version, LIB_DIR, "hadoop-yarn")
+  elif target == "bin":
+    hadoop_dir = HADOOP_HOME_DIR_TEMPLATE.format(stack_root, stack_version, BIN_DIR)
   else:
-    hadoop_dir = HADOOP_DIR_TEMPLATE.format(stack_root, stack_version, "hadoop", target)
+    hadoop_dir = HADOOP_REAL_DIR_TEMPLATE.format(stack_root, stack_version, LIB_DIR, "hadoop", target)
 
   return hadoop_dir
 
@@ -480,7 +510,7 @@ def get_stack_version_before_install(component_name):
   stack_selector_name = stack_tools.get_stack_tool_name(stack_tools.STACK_SELECTOR_NAME)
   if os.path.islink(component_dir):
     stack_version = os.path.basename(os.path.dirname(os.path.dirname(os.path.dirname(os.readlink(component_dir)))))
-    match = re.match('[0-9]+.[0-9]+.[0-9]+.[0-9]+-[0-9]+', stack_version)
+    match = re.match('[0-9]+.[0-9]+.[0-9]+', stack_version)
     if match is None:
       Logger.info('Failed to get extracted version with {0} in method get_stack_version_before_install'.format(stack_selector_name))
       return None # lazy fail
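
[The new branches in get_hadoop_dir all share the same try-versioned-then-fall-back shape. A minimal standalone restatement of that shape, with os.path.isdir standing in for Ambari's sudo.path_isdir and the /usr/bigtop root and 3.2.0 version as illustrative values only:

    import os

    LIB_DIR = "usr/lib"

    def resolve_home(stack_root, version, real_name, select_name):
        # e.g. /usr/bigtop/3.2.0/usr/lib/hadoop-hdfs (the real packaged location)
        if version is not None:
            versioned = os.path.join(stack_root, version, LIB_DIR, real_name)
            if os.path.isdir(versioned):
                return versioned
        # e.g. /usr/bigtop/current/hadoop-hdfs-client (the bigtop-select symlink)
        return os.path.join(stack_root, "current", select_name)

    print(resolve_home("/usr/bigtop", "3.2.0", "hadoop-hdfs", "hadoop-hdfs-client"))
]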
diff --git a/ambari-server/src/main/resources/stack-hooks/after-INSTALL/scripts/params.py b/ambari-server/src/main/resources/stack-hooks/after-INSTALL/scripts/params.py
index 699d33a9ee..70737a1dfb 100644
--- a/ambari-server/src/main/resources/stack-hooks/after-INSTALL/scripts/params.py
+++ b/ambari-server/src/main/resources/stack-hooks/after-INSTALL/scripts/params.py
@@ -28,6 +28,7 @@ from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions import format_jvm_option
 from resource_management.libraries.functions.version import format_stack_version, get_major_version
+from resource_management.libraries.functions import format
 from string import lower
 
 config = Script.get_config()
@@ -58,11 +59,14 @@ logsearch_config_file_path = agent_cache_dir + "/" + service_package_folder + "/
 logsearch_config_file_exists = os.path.isfile(logsearch_config_file_path)
 
 # default hadoop params
+hadoop_home = stack_select.get_hadoop_dir("home")
+hadoop_hdfs_home = stack_select.get_hadoop_dir("hdfs_home")
+hadoop_mapred_home = stack_select.get_hadoop_dir("mapred_home")
+hadoop_yarn_home = stack_select.get_hadoop_dir("yarn_home")
 hadoop_libexec_dir = stack_select.get_hadoop_dir("libexec")
+hadoop_lib_home = stack_select.get_hadoop_dir("lib")
 
-mapreduce_libs_path = "/usr/hdp/current/hadoop-mapreduce-client/*"
-
-versioned_stack_root = '/usr/hdp/current'
+mapreduce_libs_path = format("{hadoop_mapred_home}/*,{hadoop_mapred_home}/lib/*")
 
 #security params
 security_enabled = config['configurations']['cluster-env']['security_enabled']
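
[resource_management's format() interpolates names from the caller's scope, so the new assignment expands to a comma-separated pair of glob patterns. A plain-Python equivalent (the hadoop_mapred_home value is illustrative; the real one comes from stack_select above):

    hadoop_mapred_home = "/usr/lib/hadoop-mapreduce"  # illustrative
    mapreduce_libs_path = "{0}/*,{0}/lib/*".format(hadoop_mapred_home)
    print(mapreduce_libs_path)  # /usr/lib/hadoop-mapreduce/*,/usr/lib/hadoop-mapreduce/lib/*
]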
diff --git a/ambari-server/src/main/resources/stack-hooks/before-ANY/scripts/params.py b/ambari-server/src/main/resources/stack-hooks/before-ANY/scripts/params.py
index f4481fc4e2..9e6631d1bb 100644
--- a/ambari-server/src/main/resources/stack-hooks/before-ANY/scripts/params.py
+++ b/ambari-server/src/main/resources/stack-hooks/before-ANY/scripts/params.py
@@ -109,6 +109,9 @@ def is_secure_port(port):
 # force the use of "current" in the hook
 hdfs_user_nofile_limit = default("/configurations/hadoop-env/hdfs_user_nofile_limit", "128000")
 hadoop_home = stack_select.get_hadoop_dir("home")
+hadoop_hdfs_home = stack_select.get_hadoop_dir("hdfs_home")
+hadoop_mapred_home = stack_select.get_hadoop_dir("mapred_home")
+hadoop_yarn_home = stack_select.get_hadoop_dir("yarn_home")
 hadoop_libexec_dir = stack_select.get_hadoop_dir("libexec")
 hadoop_lib_home = stack_select.get_hadoop_dir("lib")
 
@@ -124,7 +127,13 @@ hadoop_java_io_tmpdir = os.path.join(tmp_dir, "hadoop_java_io_tmpdir")
 datanode_max_locked_memory = config['configurations']['hdfs-site']['dfs.datanode.max.locked.memory']
 is_datanode_max_locked_memory_set = not is_empty(config['configurations']['hdfs-site']['dfs.datanode.max.locked.memory'])
 
-mapreduce_libs_path = "/usr/hdp/current/hadoop-mapreduce-client/*"
+mapreduce_libs_path = format("{hadoop_mapred_home}/*,{hadoop_mapred_home}/lib/*")
+
+tez_home = '/usr/lib/tez'
+tez_conf_dir = '/etc/tez/conf'
+# hadoop parameters for stacks that support rolling_upgrade
+if stack_version_formatted and check_stack_feature(StackFeature.ROLLING_UPGRADE, stack_version_formatted):
+  tez_home = format("{stack_root}/current/tez-client")
 
 if not security_enabled:
   hadoop_secure_dn_user = '""'
diff --git a/ambari-server/src/main/resources/stack-hooks/before-START/scripts/params.py b/ambari-server/src/main/resources/stack-hooks/before-START/scripts/params.py
index 7988bd3b5f..139ce60c88 100644
--- a/ambari-server/src/main/resources/stack-hooks/before-START/scripts/params.py
+++ b/ambari-server/src/main/resources/stack-hooks/before-START/scripts/params.py
@@ -72,8 +72,12 @@ hadoop_libexec_dir = stack_select.get_hadoop_dir("libexec")
 hadoop_lib_home = stack_select.get_hadoop_dir("lib")
 hadoop_bin = stack_select.get_hadoop_dir("sbin")
 
-mapreduce_libs_path = "/usr/hdp/current/hadoop-mapreduce-client/*"
 hadoop_home = stack_select.get_hadoop_dir("home")
+hadoop_hdfs_home = stack_select.get_hadoop_dir("hdfs_home")
+hadoop_mapred_home = stack_select.get_hadoop_dir("mapred_home")
+hadoop_yarn_home = stack_select.get_hadoop_dir("yarn_home")
+
+mapreduce_libs_path = format("{hadoop_mapred_home}/*,{hadoop_mapred_home}/lib/*")
 create_lib_snappy_symlinks = False
   
 current_service = config['serviceName']
diff --git a/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/properties/stack_features.json b/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/properties/stack_features.json
index 2d8b7669d8..eb831b61c9 100644
--- a/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/properties/stack_features.json
+++ b/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/properties/stack_features.json
@@ -1,11 +1,6 @@
 {
   "BIGTOP": {
     "stack_features": [
-      {
-        "name": "snappy",
-        "description": "Snappy compressor/decompressor support",
-        "min_version": "3.2.0"
-      },
       {
         "name": "lzo",
         "description": "LZO libraries support",
diff --git a/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/properties/stack_packages.json b/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/properties/stack_packages.json
index 1e1bd47a3e..01a94b0629 100644
--- a/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/properties/stack_packages.json
+++ b/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/properties/stack_packages.json
@@ -45,7 +45,8 @@
         "DATANODE": {
           "STACK-SELECT-PACKAGE": "hadoop-hdfs-datanode",
           "INSTALL": [
-            "hadoop-hdfs-datanode"
+            "hadoop-hdfs-datanode",
+            "hadoop-client"
           ],
           "PATCH": [
             "hadoop-hdfs-datanode"
@@ -57,7 +58,8 @@
         "HDFS_CLIENT": {
           "STACK-SELECT-PACKAGE": "hadoop-hdfs-client",
           "INSTALL": [
-            "hadoop-hdfs-client"
+            "hadoop-hdfs-client",
+            "hadoop-client"
           ],
           "PATCH": [
             "hadoop-hdfs-client"
@@ -69,7 +71,8 @@
         "NAMENODE": {
           "STACK-SELECT-PACKAGE": "hadoop-hdfs-namenode",
           "INSTALL": [
-            "hadoop-hdfs-namenode"
+            "hadoop-hdfs-namenode",
+            "hadoop-client"
           ],
           "PATCH": [
             "hadoop-hdfs-namenode"
@@ -81,7 +84,8 @@
         "JOURNALNODE": {
           "STACK-SELECT-PACKAGE": "hadoop-hdfs-journalnode",
           "INSTALL": [
-            "hadoop-hdfs-journalnode"
+            "hadoop-hdfs-journalnode",
+            "hadoop-client"
           ],
           "PATCH": [
             "hadoop-hdfs-journalnode"
@@ -93,7 +97,8 @@
         "SECONDARY_NAMENODE": {
           "STACK-SELECT-PACKAGE": "hadoop-hdfs-secondarynamenode",
           "INSTALL": [
-            "hadoop-hdfs-secondarynamenode"
+            "hadoop-hdfs-secondarynamenode",
+            "hadoop-client"
           ],
           "PATCH": [
             "hadoop-hdfs-secondarynamenode"
@@ -105,7 +110,8 @@
         "ZKFC": {
           "STACK-SELECT-PACKAGE": "hadoop-hdfs-zkfc",
           "INSTALL": [
-            "hadoop-hdfs-zkfc"
+            "hadoop-hdfs-zkfc",
+            "hadoop-client"
           ],
           "PATCH": [
             "hadoop-hdfs-zkfc"
@@ -171,7 +177,8 @@
         "HISTORYSERVER": {
           "STACK-SELECT-PACKAGE": "hadoop-mapreduce-historyserver",
           "INSTALL": [
-            "hadoop-mapreduce-historyserver"
+            "hadoop-mapreduce-historyserver",
+            "hadoop-client"
           ],
           "PATCH": [
             "hadoop-mapreduce-historyserver"
@@ -183,7 +190,8 @@
         "MAPREDUCE2_CLIENT": {
           "STACK-SELECT-PACKAGE": "hadoop-mapreduce-client",
           "INSTALL": [
-            "hadoop-mapreduce-client"
+            "hadoop-mapreduce-client",
+            "hadoop-client"
           ],
           "PATCH": [
             "hadoop-mapreduce-client"
@@ -289,7 +297,8 @@
         "NODEMANAGER": {
           "STACK-SELECT-PACKAGE": "hadoop-yarn-nodemanager",
           "INSTALL": [
-            "hadoop-yarn-nodemanager"
+            "hadoop-yarn-nodemanager",
+            "hadoop-client"
           ],
           "PATCH": [
             "hadoop-yarn-nodemanager"
@@ -301,7 +310,8 @@
         "RESOURCEMANAGER": {
           "STACK-SELECT-PACKAGE": "hadoop-yarn-resourcemanager",
           "INSTALL": [
-            "hadoop-yarn-resourcemanager"
+            "hadoop-yarn-resourcemanager",
+            "hadoop-client"
           ],
           "PATCH": [
             "hadoop-yarn-resourcemanager"
@@ -313,7 +323,8 @@
         "YARN_CLIENT": {
           "STACK-SELECT-PACKAGE": "hadoop-yarn-client",
           "INSTALL": [
-            "hadoop-yarn-client"
+            "hadoop-yarn-client",
+            "hadoop-client"
           ],
           "PATCH": [
             "hadoop-yarn-client"
diff --git a/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/HDFS/configuration/hadoop-env.xml b/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/HDFS/configuration/hadoop-env.xml
index 06da0ac17c..cd46554318 100644
--- a/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/HDFS/configuration/hadoop-env.xml
+++ b/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/HDFS/configuration/hadoop-env.xml
@@ -255,7 +255,11 @@ export JAVA_HOME={{java_home}}
 export HADOOP_HOME_WARN_SUPPRESS=1
 
 # Hadoop home directory
-export HADOOP_HOME=${HADOOP_HOME:-/usr/lib/hadoop}
+export HADOOP_HOME=${HADOOP_HOME:-{{hadoop_home}}}
+export HADOOP_COMMON_HOME={{hadoop_home}}
+export HADOOP_HDFS_HOME={{hadoop_hdfs_home}}
+export HADOOP_MAPRED_HOME={{hadoop_mapred_home}}
+export HADOOP_YARN_HOME={{hadoop_yarn_home}}
 
 # Hadoop Configuration Directory
 #TODO: if env var set that can cause problems
@@ -358,14 +362,11 @@ if [ -d "/usr/share/java" ]; then
   done
 fi
 
-# Add libraries required by nodemanager
-MAPREDUCE_LIBS={{mapreduce_libs_path}}
-
 # Add libraries to the hadoop classpath - some may not need a colon as they already include it
-export HADOOP_CLASSPATH=${HADOOP_CLASSPATH}${JAVA_JDBC_LIBS}:${MAPREDUCE_LIBS}
+export HADOOP_CLASSPATH=${HADOOP_CLASSPATH}${JAVA_JDBC_LIBS}
 
-if [ -d "/usr/lib/tez" ]; then
-  export HADOOP_CLASSPATH=$HADOOP_CLASSPATH:/usr/lib/tez/*:/usr/lib/tez/lib/*:/etc/tez/conf
+if [ -d "{{tez_home}}" ]; then
+  export HADOOP_CLASSPATH=$HADOOP_CLASSPATH:{{tez_home}}/*:{{tez_home}}/lib/*:{{tez_conf_dir}}
 fi
 
 # Setting path to hdfs command line
diff --git a/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/HDFS/configuration/ssl-client.xml b/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/HDFS/configuration/ssl-client.xml
index 6ec064af19..d364497f39 100644
--- a/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/HDFS/configuration/ssl-client.xml
+++ b/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/HDFS/configuration/ssl-client.xml
@@ -67,4 +67,4 @@
     </value-attributes>
     <on-ambari-upgrade add="true"/>
   </property>
-</configuration>
+</configuration>
\ No newline at end of file
diff --git a/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/HDFS/package/scripts/params_linux.py b/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/HDFS/package/scripts/params_linux.py
index 1b36cb3338..806a6220f7 100644
--- a/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/HDFS/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/HDFS/package/scripts/params_linux.py
@@ -102,19 +102,17 @@ hdfs_tmp_dir = default("/configurations/hadoop-env/hdfs_tmp_dir", "/tmp")
 namenode_backup_dir = default("/configurations/hadoop-env/namenode_backup_dir", "/tmp/upgrades")
 
 # hadoop default parameters
-mapreduce_libs_path = "/usr/lib/hadoop-mapreduce/*"
 hadoop_libexec_dir = stack_select.get_hadoop_dir("libexec")
 hadoop_bin = stack_select.get_hadoop_dir("sbin")
 hadoop_bin_dir = stack_select.get_hadoop_dir("bin")
 hadoop_home = stack_select.get_hadoop_dir("home")
-hadoop_secure_dn_user = hdfs_user
+hadoop_hdfs_home = stack_select.get_hadoop_dir("hdfs_home")
+hadoop_mapred_home = stack_select.get_hadoop_dir("mapred_home")
+hadoop_lib_home = stack_select.get_hadoop_dir("lib")
 hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
+hadoop_secure_dn_user = hdfs_user
 hadoop_conf_secure_dir = os.path.join(hadoop_conf_dir, "secure")
-hadoop_lib_home = stack_select.get_hadoop_dir("lib")
-
-# hadoop parameters for stacks that support rolling_upgrade
-if stack_version_formatted and check_stack_feature(StackFeature.ROLLING_UPGRADE, stack_version_formatted):
-  mapreduce_libs_path = format("{stack_root}/current/hadoop-mapreduce-client/*")
+mapreduce_libs_path = format("{hadoop_mapred_home}/*")
 
 if not security_enabled:
   hadoop_secure_dn_user = '""'
diff --git a/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/HDFS/package/scripts/utils.py b/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/HDFS/package/scripts/utils.py
index 0281a61e1b..f94685eddf 100644
--- a/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/HDFS/package/scripts/utils.py
+++ b/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/HDFS/package/scripts/utils.py
@@ -383,8 +383,6 @@ def get_hdfs_binary(distro_component_name):
   """
   import params
   hdfs_binary = "hdfs"
-  if params.stack_version_formatted and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.stack_version_formatted):
-    hdfs_binary = "{0}/current/{1}/bin/hdfs".format(params.stack_root, distro_component_name)
 
   return hdfs_binary
 
diff --git a/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/YARN/configuration-mapred/mapred-site.xml b/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/YARN/configuration-mapred/mapred-site.xml
index d78d6ef4fe..8a01b3d576 100644
--- a/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/YARN/configuration-mapred/mapred-site.xml
+++ b/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/YARN/configuration-mapred/mapred-site.xml
@@ -373,7 +373,7 @@
   </property>
   <property>
     <name>mapreduce.application.classpath</name>
-    <value>$HADOOP_MAPRED_HOME/share/hadoop/mapreduce/*,$HADOOP_MAPRED_HOME/share/hadoop/mapreduce/lib/*</value>
+    <value>{{hadoop_conf_dir}},{{hadoop_home}}/*,{{hadoop_home}}/lib/*,{{hadoop_hdfs_home}}/*,{{hadoop_hdfs_home}}/lib/*,{{hadoop_yarn_home}}/*,{{hadoop_yarn_home}}/lib/*,{{hadoop_mapred_home}}/*,{{hadoop_mapred_home}}/lib/*</value>
     <description>
       CLASSPATH for MR applications. A comma-separated list of CLASSPATH
       entries.
@@ -441,7 +441,7 @@
   </property>
   <property>
     <name>mapreduce.admin.user.env</name>
-    <value>LD_LIBRARY_PATH={{hadoop_lib_home}}/native:{{hadoop_lib_home}}/native/Linux-{{architecture}}-64:./mr-framework/hadoop/lib/native:./mr-framework/hadoop/lib/native/Linux-{{architecture}}-64</value>
+    <value>LD_LIBRARY_PATH={{hadoop_home}}/lib/native</value>
     <description>
       Additional execution environment entries for map and reduce task processes.
       This is not an additive property. You must preserve the original value if
diff --git a/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/YARN/configuration/yarn-env.xml b/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/YARN/configuration/yarn-env.xml
index b47ed90e86..81ff757f7a 100644
--- a/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/YARN/configuration/yarn-env.xml
+++ b/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/YARN/configuration/yarn-env.xml
@@ -233,7 +233,7 @@ HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.log.dir=$HADOOP_LOG_DIR"
 HADOOP_OPTS="$HADOOP_OPTS -Dyarn.log.dir=$HADOOP_LOG_DIR"
 HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.log.file=$HADOOP_LOGFILE"
 HADOOP_OPTS="$HADOOP_OPTS -Dyarn.log.file=$HADOOP_LOGFILE"
-HADOOP_OPTS="$HADOOP_OPTS -Dyarn.home.dir=$HADOOP_COMMON_HOME"
+HADOOP_OPTS="$HADOOP_OPTS -Dyarn.home.dir=$HADOOP_YARN_HOME"
 HADOOP_OPTS="$HADOOP_OPTS -Dyarn.id.str=$HADOOP_IDENT_STRING"
 HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.root.logger=${HADOOP_ROOT_LOGGER:-INFO,console}"
 HADOOP_OPTS="$HADOOP_OPTS -Dyarn.root.logger=${HADOOP_ROOT_LOGGER:-INFO,console}"
diff --git a/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/YARN/configuration/yarn-site.xml b/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/YARN/configuration/yarn-site.xml
index 7f044b1d8b..00d834a6a3 100644
--- a/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/YARN/configuration/yarn-site.xml
+++ b/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/YARN/configuration/yarn-site.xml
@@ -148,7 +148,7 @@
   </property>
   <property>
     <name>yarn.application.classpath</name>
-    <value>/etc/hadoop/conf,/usr/lib/hadoop/*,/usr/lib/hadoop/lib/*,/usr/lib/hadoop-hdfs/*,/usr/lib/hadoop-hdfs/lib/*,/usr/lib/hadoop-yarn/*,/usr/lib/hadoop-yarn/lib/*,/usr/lib/hadoop-mapreduce/*,/usr/lib/hadoop-mapreduce/lib/*</value>
+    <value>{{hadoop_conf_dir}},{{hadoop_home}}/*,{{hadoop_home}}/lib/*,{{hadoop_hdfs_home}}/*,{{hadoop_hdfs_home}}/lib/*,{{hadoop_yarn_home}}/*,{{hadoop_yarn_home}}/lib/*</value>
     <description>Classpath for typical applications.</description>
     <on-ambari-upgrade add="true"/>
   </property>
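
[The {{...}} tokens are Ambari configuration placeholders filled from params at render time. A small sketch of how the new yarn.application.classpath value comes out, using a naive string substitution in place of Ambari's template engine and the /usr/lib defaults added in stack_select.py as hypothetical param values:

    params = {
        "hadoop_conf_dir": "/etc/hadoop/conf",
        "hadoop_home": "/usr/lib/hadoop",
        "hadoop_hdfs_home": "/usr/lib/hadoop-hdfs",
        "hadoop_yarn_home": "/usr/lib/hadoop-yarn",
    }
    template = ("{{hadoop_conf_dir}},{{hadoop_home}}/*,{{hadoop_home}}/lib/*,"
                "{{hadoop_hdfs_home}}/*,{{hadoop_hdfs_home}}/lib/*,"
                "{{hadoop_yarn_home}}/*,{{hadoop_yarn_home}}/lib/*")
    rendered = template
    for key, value in params.items():
        rendered = rendered.replace("{{%s}}" % key, value)
    # reproduces the previous hardcoded value when the dirs resolve to /usr/lib defaults
    print(rendered)
]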
diff --git a/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/YARN/package/scripts/historyserver.py b/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/YARN/package/scripts/historyserver.py
index a93bc17a57..e10d514ca0 100644
--- a/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/YARN/package/scripts/historyserver.py
+++ b/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/YARN/package/scripts/historyserver.py
@@ -73,42 +73,11 @@ class HistoryServerDefault(HistoryServer):
     import params
     env.set_params(params)
 
-    if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
-      stack_select.select_packages(params.version)
-      # MC Hammer said, "Can't touch this"
-      copy_to_hdfs("mapreduce", params.user_group, params.hdfs_user, skip=params.sysprep_skip_copy_tarballs_hdfs)
-      copy_to_hdfs("tez", params.user_group, params.hdfs_user, skip=params.sysprep_skip_copy_tarballs_hdfs)
-      copy_to_hdfs("slider", params.user_group, params.hdfs_user, skip=params.sysprep_skip_copy_tarballs_hdfs)
-      params.HdfsResource(None, action="execute")
-
   def start(self, env, upgrade_type=None):
     import params
     env.set_params(params)
     self.configure(env) # FOR SECURITY
 
-    if check_stack_feature(StackFeature.COPY_TARBALL_TO_HDFS, params.version_for_stack_feature_checks):
-      # MC Hammer said, "Can't touch this"
-      resource_created = copy_to_hdfs(
-        "mapreduce",
-        params.user_group,
-        params.hdfs_user,
-        skip=params.sysprep_skip_copy_tarballs_hdfs)
-      resource_created = copy_to_hdfs(
-        "tez",
-        params.user_group,
-        params.hdfs_user,
-        skip=params.sysprep_skip_copy_tarballs_hdfs) or resource_created
-      resource_created = copy_to_hdfs(
-        "slider",
-        params.user_group,
-        params.hdfs_user,
-        skip=params.sysprep_skip_copy_tarballs_hdfs) or resource_created
-      if resource_created:
-        params.HdfsResource(None, action="execute")
-    else:
-      # In stack versions before copy_tarball_to_hdfs support tez.tar.gz was copied to a different folder in HDFS.
-      install_tez_jars()
-
     service('historyserver', action='start', serviceName='mapreduce')
 
   def status(self, env):
diff --git a/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/YARN/package/scripts/mapred_service_check.py b/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/YARN/package/scripts/mapred_service_check.py
index dd6d65f39b..d91d60a2dd 100644
--- a/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/YARN/package/scripts/mapred_service_check.py
+++ b/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/YARN/package/scripts/mapred_service_check.py
@@ -73,7 +73,7 @@ class MapReduce2ServiceCheckWindows(MapReduce2ServiceCheck):
     # hadoop_exe = os.path.join(params.hadoop_home, "bin", "hadoop")
     #
     # tested_file = os.path.join(params.hadoop_home, "bin", "hadoop.cmd")
-    # jar_path = os.path.join(params.hadoop_mapred2_jar_location, params.hadoopMapredExamplesJarName)
+    # jar_path = os.path.join(params.hadoop_mapred_home, params.hadoopMapredExamplesJarName)
     # input_file = format("/user/hadoop/mapredsmokeinput")
     # output_file = format("/user/hadoop/mapredsmokeoutput")
     # cleanup_cmd = format("cmd /C {hadoop_exe} fs -rm -r -f {output_file} {input_file}")
@@ -118,7 +118,7 @@ class MapReduce2ServiceCheckDefault(MapReduce2ServiceCheck):
     import params
     env.set_params(params)
 
-    jar_path = format("{hadoop_mapred2_jar_location}/{hadoopMapredExamplesJarName}")
+    jar_path = format("{hadoop_mapred_home}/{hadoopMapredExamplesJarName}")
     source_file = format("/etc/passwd")
     input_file = format("/user/{smokeuser}/mapredsmokeinput")
     output_file = format("/user/{smokeuser}/mapredsmokeoutput")
diff --git a/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/YARN/package/scripts/params_linux.py b/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/YARN/package/scripts/params_linux.py
index d5859fd765..5950386ad3 100644
--- a/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/YARN/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/YARN/package/scripts/params_linux.py
@@ -66,9 +66,6 @@ stack_name = status_params.stack_name
 stack_root = Script.get_stack_root()
 tarball_map = default("/configurations/cluster-env/tarball_map", None)
 
-config_path = stack_select.get_hadoop_dir("conf")
-config_dir = os.path.realpath(config_path)
-
 # get the correct version to use for checking stack features
 version_for_stack_feature_checks = get_stack_feature_version(config)
 
@@ -141,57 +138,18 @@ hostname = config['agentLevelParams']['hostname']
 # hadoop default parameters
 hadoop_home = status_params.hadoop_home
 hadoop_libexec_dir = stack_select.get_hadoop_dir("libexec")
+hadoop_hdfs_home = stack_select.get_hadoop_dir("hdfs_home")
+hadoop_mapred_home = stack_select.get_hadoop_dir("mapred_home")
+hadoop_yarn_home = stack_select.get_hadoop_dir("yarn_home")
 hadoop_bin = stack_select.get_hadoop_dir("sbin")
 hadoop_bin_dir = stack_select.get_hadoop_dir("bin")
 hadoop_lib_home = stack_select.get_hadoop_dir("lib")
 hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
-hadoop_yarn_home = '/usr/lib/hadoop-yarn'
-hadoop_mapred2_jar_location = "/usr/lib/hadoop-mapreduce"
-mapred_bin = "/usr/lib/hadoop-mapreduce/sbin"
-yarn_bin = "/usr/lib/hadoop-yarn/sbin"
-yarn_container_bin = "/usr/lib/hadoop-yarn/bin"
+mapred_bin = format("{hadoop_mapred_home}/sbin")
+yarn_bin = format("{hadoop_yarn_home}/sbin")
+yarn_container_bin = format("{hadoop_yarn_home}/bin")
 hadoop_java_io_tmpdir = os.path.join(tmp_dir, "hadoop_java_io_tmpdir")
 
-# hadoop parameters stack supporting rolling_uprade
-if stack_supports_ru:
-  # MapR directory root
-  mapred_role_root = "hadoop-mapreduce-client"
-  command_role = default("/role", "")
-  if command_role in MAPR_SERVER_ROLE_DIRECTORY_MAP:
-    mapred_role_root = MAPR_SERVER_ROLE_DIRECTORY_MAP[command_role]
-
-  # YARN directory root
-  yarn_role_root = "hadoop-yarn-client"
-  if command_role in YARN_SERVER_ROLE_DIRECTORY_MAP:
-    yarn_role_root = YARN_SERVER_ROLE_DIRECTORY_MAP[command_role]
-
-  # defaults set to current based on role
-  hadoop_mapr_home = format("{stack_root}/current/{mapred_role_root}")
-  hadoop_yarn_home = format("{stack_root}/current/{yarn_role_root}")
-
-  # try to render the specific version
-  version = component_version.get_component_repository_version()
-  if version is None:
-    version = default("/commandParams/version", None)
-
-
-  if version is not None:
-    hadoop_mapr_versioned_home = format("{stack_root}/{version}/hadoop-mapreduce")
-    hadoop_yarn_versioned_home = format("{stack_root}/{version}/hadoop-yarn")
-
-    if sudo.path_isdir(hadoop_mapr_versioned_home):
-      hadoop_mapr_home = hadoop_mapr_versioned_home
-
-    if sudo.path_isdir(hadoop_yarn_versioned_home):
-      hadoop_yarn_home = hadoop_yarn_versioned_home
-
-
-  hadoop_mapred2_jar_location = hadoop_mapr_home
-  mapred_bin = format("{hadoop_mapr_home}/sbin")
-
-  yarn_bin = format("{hadoop_yarn_home}/sbin")
-  yarn_container_bin = format("{hadoop_yarn_home}/bin")
-
 
 if stack_supports_timeline_state_store:
   # Timeline Service property that was added timeline_state_store stack feature
@@ -355,7 +313,7 @@ if security_enabled:
   rm_principal_name = rm_principal_name.replace('_HOST',hostname.lower())
   rm_keytab = config['configurations']['yarn-site']['yarn.resourcemanager.keytab']
   rm_kinit_cmd = format("{kinit_path_local} -kt {rm_keytab} {rm_principal_name};")
-  yarn_jaas_file = os.path.join(config_dir, 'yarn_jaas.conf')
+  yarn_jaas_file = os.path.join(hadoop_conf_dir, 'yarn_jaas.conf')
   if stack_supports_zk_security:
     zk_principal_name = default("/configurations/zookeeper-env/zookeeper_principal_name", "zookeeper/_HOST@EXAMPLE.COM")
     zk_principal_user = zk_principal_name.split('/')[0]
@@ -367,7 +325,7 @@ if security_enabled:
     yarn_timelineservice_principal_name = yarn_timelineservice_principal_name.replace('_HOST', hostname.lower())
     yarn_timelineservice_keytab = config['configurations']['yarn-site']['yarn.timeline-service.keytab']
     yarn_timelineservice_kinit_cmd = format("{kinit_path_local} -kt {yarn_timelineservice_keytab} {yarn_timelineservice_principal_name};")
-    yarn_ats_jaas_file = os.path.join(config_dir, 'yarn_ats_jaas.conf')
+    yarn_ats_jaas_file = os.path.join(hadoop_conf_dir, 'yarn_ats_jaas.conf')
 
   if 'yarn.nodemanager.principal' in config['configurations']['yarn-site']:
     nodemanager_principal_name = default('/configurations/yarn-site/yarn.nodemanager.principal', None)
@@ -376,13 +334,13 @@ if security_enabled:
 
     nodemanager_keytab = config['configurations']['yarn-site']['yarn.nodemanager.keytab']
     nodemanager_kinit_cmd = format("{kinit_path_local} -kt {nodemanager_keytab} {nodemanager_principal_name};")
-    yarn_nm_jaas_file = os.path.join(config_dir, 'yarn_nm_jaas.conf')
+    yarn_nm_jaas_file = os.path.join(hadoop_conf_dir, 'yarn_nm_jaas.conf')
 
   if has_hs:
     mapred_jhs_principal_name = config['configurations']['mapred-site']['mapreduce.jobhistory.principal']
     mapred_jhs_principal_name = mapred_jhs_principal_name.replace('_HOST', hostname.lower())
     mapred_jhs_keytab = config['configurations']['mapred-site']['mapreduce.jobhistory.keytab']
-    mapred_jaas_file = os.path.join(config_dir, 'mapred_jaas.conf')
+    mapred_jaas_file = os.path.join(hadoop_conf_dir, 'mapred_jaas.conf')
 
 yarn_log_aggregation_enabled = config['configurations']['yarn-site']['yarn.log-aggregation-enable']
 yarn_nm_app_log_dir =  config['configurations']['yarn-site']['yarn.nodemanager.remote-app-log-dir']
diff --git a/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/YARN/package/scripts/yarn.py b/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/YARN/package/scripts/yarn.py
index 6145525bcf..1cc4d8aedc 100644
--- a/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/YARN/package/scripts/yarn.py
+++ b/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/YARN/package/scripts/yarn.py
@@ -512,20 +512,20 @@ def create_local_dir(dir_name):
 def yarn(name = None):
   import params
   XmlConfig("mapred-site.xml",
-            conf_dir=params.config_dir,
+            conf_dir=params.hadoop_conf_dir,
             configurations=params.config['configurations']['mapred-site'],
             owner=params.yarn_user,
             mode='f'
   )
   XmlConfig("yarn-site.xml",
-            conf_dir=params.config_dir,
+            conf_dir=params.hadoop_conf_dir,
             configurations=params.config['configurations']['yarn-site'],
             owner=params.yarn_user,
             mode='f',
             configuration_attributes=params.config['configurationAttributes']['yarn-site']
   )
   XmlConfig("capacity-scheduler.xml",
-            conf_dir=params.config_dir,
+            conf_dir=params.hadoop_conf_dir,
             configurations=params.config['configurations']['capacity-scheduler'],
             owner=params.yarn_user,
             mode='f'
diff --git a/ambari-server/src/test/python/stacks/stack-hooks/after-INSTALL/test_after_install.py b/ambari-server/src/test/python/stacks/stack-hooks/after-INSTALL/test_after_install.py
index 6b2118b2d7..283dd1d21b 100644
--- a/ambari-server/src/test/python/stacks/stack-hooks/after-INSTALL/test_after_install.py
+++ b/ambari-server/src/test/python/stacks/stack-hooks/after-INSTALL/test_after_install.py
@@ -99,10 +99,10 @@ class TestHookAfterInstall(RMFTestCase):
     self.assertResourceCalled('XmlConfig', 'core-site.xml',
       owner = 'hdfs',
       group = 'hadoop',
-      conf_dir = "/usr/hdp/2.3.0.0-1234/hadoop/conf",
+      conf_dir = "/etc/hadoop/conf",
       configurations = self.getConfig()['configurations']['core-site'],
       configuration_attributes = self.getConfig()['configurationAttributes']['core-site'],
-      only_if="ls /usr/hdp/2.3.0.0-1234/hadoop/conf",
+      only_if="ls /etc/hadoop/conf",
       xml_include_file=None)
 
     self.assertResourceCalled('Directory',
@@ -167,10 +167,10 @@ class TestHookAfterInstall(RMFTestCase):
     self.assertResourceCalled('XmlConfig', 'core-site.xml',
       owner = 'hdfs',
       group = 'hadoop',
-      conf_dir = "/usr/hdp/2.3.0.0-1234/hadoop/conf",
+      conf_dir = "/etc/hadoop/conf",
       configurations = self.getConfig()['configurations']['core-site'],
       configuration_attributes = self.getConfig()['configurationAttributes']['core-site'],
-      only_if="ls /usr/hdp/2.3.0.0-1234/hadoop/conf",
+      only_if="ls /etc/hadoop/conf",
       xml_include_file=None)
 
     self.assertResourceCalled('Directory',
@@ -269,10 +269,10 @@ class TestHookAfterInstall(RMFTestCase):
     self.assertResourceCalled('XmlConfig', 'core-site.xml',
       owner = 'hdfs',
       group = 'hadoop',
-      conf_dir = "/usr/hdp/2.3.0.0-1234/hadoop/conf",
+      conf_dir = "/etc/hadoop/conf",
       configurations = self.getConfig()['configurations']['core-site'],
       configuration_attributes = self.getConfig()['configurationAttributes']['core-site'],
-      only_if="ls /usr/hdp/2.3.0.0-1234/hadoop/conf",
+      only_if="ls /etc/hadoop/conf",
       xml_include_file=None)
 
     self.assertResourceCalled('Directory',

