Posted to commits@ambari.apache.org by al...@apache.org on 2014/12/11 21:02:07 UTC

ambari git commit: AMBARI-8449. Upgrade Pack definition for Core Master Components (alejandro)

Repository: ambari
Updated Branches:
  refs/heads/trunk d613ef522 -> 5b75e7abf


AMBARI-8449. Upgrade Pack definition for Core Master Components (alejandro)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/5b75e7ab
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/5b75e7ab
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/5b75e7ab

Branch: refs/heads/trunk
Commit: 5b75e7abf6ee0034184e2a79db5fe6304a172b08
Parents: d613ef5
Author: Alejandro Fernandez <af...@hortonworks.com>
Authored: Mon Dec 8 19:45:56 2014 -0800
Committer: Alejandro Fernandez <af...@hortonworks.com>
Committed: Thu Dec 11 12:02:03 2014 -0800

----------------------------------------------------------------------
 .../python/resource_management/core/shell.py    |  10 +-
 .../dynamic_variable_interpretation.py          |   1 +
 .../libraries/script/script.py                  |  16 ++-
 .../ambari/server/state/UpgradeHelper.java      |  74 +++++++++++-
 .../server/state/stack/upgrade/Grouping.java    |  10 +-
 .../apache/ambari/server/utils/HTTPUtils.java   |  80 +++++++++++++
 .../custom_actions/scripts/ru_execute_tasks.py  |  11 +-
 .../FLUME/package/scripts/flume_handler.py      |   4 +-
 .../GANGLIA/package/scripts/ganglia_monitor.py  |   4 +-
 .../GANGLIA/package/scripts/ganglia_server.py   |   4 +-
 .../HBASE/package/scripts/hbase_master.py       |   4 +-
 .../HBASE/package/scripts/hbase_regionserver.py |   4 +-
 .../services/HDFS/package/scripts/__init__.py   |  20 ++++
 .../services/HDFS/package/scripts/datanode.py   |   7 +-
 .../HDFS/package/scripts/hdfs_client.py         |   4 +-
 .../HDFS/package/scripts/hdfs_namenode.py       |  21 +++-
 .../HDFS/package/scripts/journalnode.py         |  15 ++-
 .../services/HDFS/package/scripts/namenode.py   |  48 +++++---
 .../services/HDFS/package/scripts/params.py     |   7 +-
 .../services/HDFS/package/scripts/snamenode.py  |  11 +-
 .../services/HDFS/package/scripts/utils.py      | 119 +++++++++++++++++--
 .../services/HDFS/package/scripts/zkfc_slave.py |  12 +-
 .../HIVE/package/scripts/hive_metastore.py      |   4 +-
 .../HIVE/package/scripts/hive_server.py         |   4 +-
 .../HIVE/package/scripts/mysql_server.py        |   4 +-
 .../HIVE/package/scripts/webhcat_server.py      |   4 +-
 .../OOZIE/package/scripts/oozie_server.py       |   4 +-
 .../scripts/application_timeline_server.py      |  16 ++-
 .../YARN/package/scripts/historyserver.py       |  17 ++-
 .../YARN/package/scripts/nodemanager.py         |  41 ++++++-
 .../services/YARN/package/scripts/params.py     |  11 +-
 .../YARN/package/scripts/resourcemanager.py     |  14 ++-
 .../ZOOKEEPER/package/scripts/params.py         |   6 +-
 .../package/scripts/zookeeper_server.py         |  19 +--
 .../stacks/HDP/2.2/upgrades/upgrade-2.2.xml     |  57 ++++++++-
 .../ambari/server/state/UpgradeHelperTest.java  |   3 +-
 36 files changed, 575 insertions(+), 115 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/5b75e7ab/ambari-common/src/main/python/resource_management/core/shell.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/core/shell.py b/ambari-common/src/main/python/resource_management/core/shell.py
index ef44ca4..6179b44 100644
--- a/ambari-common/src/main/python/resource_management/core/shell.py
+++ b/ambari-common/src/main/python/resource_management/core/shell.py
@@ -35,10 +35,18 @@ SUDO_ENVIRONMENT_PLACEHOLDER = "{ENV_PLACEHOLDER}"
 
 def checked_call(command, verbose=False, logoutput=False,
          cwd=None, env=None, preexec_fn=None, user=None, wait_for_finish=True, timeout=None, path=None, output_file=None, sudo=False):
+  """
+  Execute the process and throw an exception on failure.
+  @return: return_code, stdout
+  """
   return _call(command, verbose, logoutput, True, cwd, env, preexec_fn, user, wait_for_finish, timeout, path, output_file, sudo)
 
 def call(command, verbose=False, logoutput=False,
          cwd=None, env=None, preexec_fn=None, user=None, wait_for_finish=True, timeout=None, path=None, output_file=None, sudo=False):
+  """
+  Execute the process and return the result even if it fails.
+  @return: return_code, stdout
+  """
   return _call(command, verbose, logoutput, False, cwd, env, preexec_fn, user, wait_for_finish, timeout, path, output_file, sudo)
             
 def _call(command, verbose=False, logoutput=False, throw_on_failure=True,
@@ -51,7 +59,7 @@ def _call(command, verbose=False, logoutput=False, throw_on_failure=True,
  @param logoutput: boolean, whether command output should be logged or not
   @param throw_on_failure: if true, when return code is not zero exception is thrown
   
-  @return: retrun_code, stdout
+  @return: return_code, stdout
   """
 
   # Append current PATH to env['PATH'] and path
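
For reference, the difference between the two wrappers in use (a minimal sketch; the commands are illustrative):

    from resource_management.core.shell import call, checked_call

    # call() always returns (return_code, stdout); the caller inspects the code.
    code, out = call("hdp-select versions")
    if code != 0:
        print("hdp-select failed with code %s" % code)

    # checked_call() raises on a non-zero return code, so execution only
    # continues past this line on success.
    code, out = checked_call("hdp-select versions")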

http://git-wip-us.apache.org/repos/asf/ambari/blob/5b75e7ab/ambari-common/src/main/python/resource_management/libraries/functions/dynamic_variable_interpretation.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/dynamic_variable_interpretation.py b/ambari-common/src/main/python/resource_management/libraries/functions/dynamic_variable_interpretation.py
index 66df9ff..5f952d3 100644
--- a/ambari-common/src/main/python/resource_management/libraries/functions/dynamic_variable_interpretation.py
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/dynamic_variable_interpretation.py
@@ -146,6 +146,7 @@ def copy_tarballs_to_hdfs(tarball_prefix, component_user, file_owner, group_owne
 
   # Ubuntu returns: "stdin: is not a tty", as subprocess output.
   tmpfile = tempfile.NamedTemporaryFile()
+  out = None
   with open(tmpfile.name, 'r+') as file:
     get_hdp_version_cmd = '/usr/bin/hdp-select versions > %s' % tmpfile.name
     code, stdoutdata = shell.call(get_hdp_version_cmd)

http://git-wip-us.apache.org/repos/asf/ambari/blob/5b75e7ab/ambari-common/src/main/python/resource_management/libraries/script/script.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/script/script.py b/ambari-common/src/main/python/resource_management/libraries/script/script.py
index 64af325..b66110d 100644
--- a/ambari-common/src/main/python/resource_management/libraries/script/script.py
+++ b/ambari-common/src/main/python/resource_management/libraries/script/script.py
@@ -244,13 +244,13 @@ class Script(object):
     sys.stderr.write("Error: " + message)
     sys.exit(1)
 
-  def start(self, env):
+  def start(self, env, rolling_restart=False):
     """
     To be overridden by subclasses
     """
     self.fail_with_error('start method isn\'t implemented')
 
-  def stop(self, env):
+  def stop(self, env, rolling_restart=False):
     """
     To be overridden by subclasses
     """
@@ -288,12 +288,20 @@ class Script(object):
 
       rolling_restart = restart_type.lower().startswith("rolling")
 
-      self.stop(env)
+      # To remain backward compatible with older stacks, only pass rolling_restart if True.
+      if rolling_restart:
+        self.stop(env, rolling_restart=rolling_restart)
+      else:
+        self.stop(env)
 
       if rolling_restart:
         self.pre_rolling_restart(env)
 
-      self.start(env)
+      # To remain backward compatible with older stacks, only pass rolling_restart if True.
+      if rolling_restart:
+        self.start(env, rolling_restart=rolling_restart)
+      else:
+        self.start(env)
 
       if rolling_restart:
         self.post_rolling_restart(env)
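
Because rolling_restart is a keyword argument with a default, an older subclass that still defines start(self, env) keeps working unchanged, while upgraded scripts can opt in. A hypothetical component script (MyDaemon and its body are illustrative only):

    from resource_management.libraries.script.script import Script

    class MyDaemon(Script):
      def start(self, env, rolling_restart=False):
        # rolling_restart is only passed (as True) when restart() is
        # driving a rolling upgrade.
        if rolling_restart:
          pass  # e.g., start the daemon with upgrade-specific options

      def stop(self, env, rolling_restart=False):
        pass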

http://git-wip-us.apache.org/repos/asf/ambari/blob/5b75e7ab/ambari-server/src/main/java/org/apache/ambari/server/state/UpgradeHelper.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/UpgradeHelper.java b/ambari-server/src/main/java/org/apache/ambari/server/state/UpgradeHelper.java
index 7bd7591..15e3ddc 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/UpgradeHelper.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/UpgradeHelper.java
@@ -19,9 +19,14 @@ package org.apache.ambari.server.state;
 
 import java.util.ArrayList;
 import java.util.Collections;
+import java.util.HashMap;
+import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
+import java.util.LinkedHashSet;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
 
 import org.apache.ambari.server.controller.internal.RequestResourceProvider;
 import org.apache.ambari.server.controller.internal.StageResourceProvider;
@@ -45,6 +50,7 @@ import org.apache.ambari.server.state.stack.upgrade.Grouping;
 import org.apache.ambari.server.state.stack.upgrade.StageWrapper;
 import org.apache.ambari.server.state.stack.upgrade.StageWrapperBuilder;
 import org.apache.ambari.server.state.stack.upgrade.TaskWrapper;
+import org.apache.ambari.server.utils.HTTPUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -56,6 +62,60 @@ public class UpgradeHelper {
   private static Logger LOG = LoggerFactory.getLogger(UpgradeHelper.class);
 
   /**
+   * Tuple of namenode states
+   */
+  public static class NameNodePair {
+    String activeHostName;
+    String standbyHostName;
+  }
+
+  /**
+   * Retrieve an object that represents a tuple of the active and standby namenodes. This should be called in an HA cluster.
+   * @param hosts
+   * @return
+   */
+  public static NameNodePair getNameNodePair(Set<String> hosts) {
+    if (hosts != null && hosts.size() == 2) {
+      Iterator iter = hosts.iterator();
+      HashMap<String, String> stateToHost = new HashMap<String, String>();
+      Pattern pattern = Pattern.compile("^.*org\\.apache\\.hadoop\\.hdfs\\.server\\.namenode\\.NameNode\".*?\"State\"\\s*:\\s*\"(.+?)\".*$");
+
+      while(iter.hasNext()) {
+        String hostname = (String) iter.next();
+        try {
+          // TODO Rolling Upgrade, don't hardcode jmx port number
+          // E.g.,
+          // dfs.namenode.http-address.dev.nn1 : c6401.ambari.apache.org:50070
+          // dfs.namenode.http-address.dev.nn2 : c6402.ambari.apache.org:50070
+          String endpoint = "http://" + hostname + ":50070/jmx";
+          String response = HTTPUtils.requestURL(endpoint);
+
+          if (response != null && !response.isEmpty()) {
+            Matcher matcher = pattern.matcher(response);
+            if (matcher.matches()) {
+              String state = matcher.group(1);
+              stateToHost.put(state.toLowerCase(), hostname);
+            }
+          } else {
+            throw new Exception("Response from endpoint " + endpoint + " was empty.");
+          }
+        } catch (Exception e) {
+          LOG.warn("Failed to parse namenode jmx endpoint to get state for host " + hostname + ". Error: " + e.getMessage());
+        }
+      }
+
+      if (stateToHost.containsKey("active") && stateToHost.containsKey("standby") && !stateToHost.get("active").equalsIgnoreCase(stateToHost.get("standby"))) {
+        NameNodePair pair = new NameNodePair();
+        pair.activeHostName = stateToHost.get("active");
+        pair.standbyHostName = stateToHost.get("standby");
+        return pair;
+      }
+    }
+
+    return null;
+  }
+
+  /**
    * Generates a list of UpgradeGroupHolder items that are used to execute an upgrade
    * @param cluster the cluster
    * @param upgradePack the upgrade pack
@@ -101,7 +161,19 @@ public class UpgradeHelper {
 
           ProcessingComponent pc = allTasks.get(service.serviceName).get(component);
 
-          builder.add(componentHosts, service.serviceName, pc);
+          // Special case for NAMENODE
+          if (service.serviceName.equalsIgnoreCase("HDFS") && component.equalsIgnoreCase("NAMENODE")) {
+              NameNodePair pair = getNameNodePair(componentHosts);
+              if (pair != null ) {
+                // The order is important, first do the standby, then the active namenode.
+                Set<String> order = new LinkedHashSet<String>();
+                order.add(pair.standbyHostName);
+                order.add(pair.activeHostName);
+                builder.add(order, service.serviceName, pc);
+              }
+          } else {
+            builder.add(componentHosts, service.serviceName, pc);
+          }
         }
       }
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/5b75e7ab/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/Grouping.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/Grouping.java b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/Grouping.java
index 15399b5..4f4b034 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/Grouping.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/Grouping.java
@@ -59,6 +59,13 @@ public class Grouping {
     private List<StageWrapper> stages = new ArrayList<StageWrapper>();
     private Set<String> serviceChecks = new HashSet<String>();
 
+    /**
+     * Add stages where the restart stages are ordered
+     * E.g., preupgrade, restart hosts(0), ..., restart hosts(n-1), postupgrade
+     * @param hosts the hosts
+     * @param service the service name
+     * @param pc the ProcessingComponent derived from the upgrade pack.
+     */
     @Override
     public void add(Set<String> hosts, String service, ProcessingComponent pc) {
       if (null != pc.preTasks && pc.preTasks.size() > 0) {
@@ -116,8 +123,5 @@ public class Grouping {
 
       return stages;
     }
-
   }
-
-
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/5b75e7ab/ambari-server/src/main/java/org/apache/ambari/server/utils/HTTPUtils.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/utils/HTTPUtils.java b/ambari-server/src/main/java/org/apache/ambari/server/utils/HTTPUtils.java
new file mode 100644
index 0000000..38ac643
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/utils/HTTPUtils.java
@@ -0,0 +1,80 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.server.utils;
+
+import org.apache.ambari.server.controller.internal.URLStreamProvider;
+import org.apache.ambari.server.proxy.ProxyService;
+
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.net.HttpURLConnection;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Static Helper methods for HTTP requests.
+ */
+public class HTTPUtils {
+
+  /**
+   * Issues a GET request against a URL.
+   * @param urlToRead URL to read from
+   * @return Returns the response body if successful, and an empty string otherwise.
+   */
+  public static String requestURL(String urlToRead) {
+    String result = "";
+    BufferedReader rd;
+    String line = null;
+    String url = urlToRead;
+
+    try {
+      URLStreamProvider urlStreamProvider = new URLStreamProvider(ProxyService.URL_CONNECT_TIMEOUT, ProxyService.URL_READ_TIMEOUT, null, null, null);
+
+      Map<String, List<String>> headers = new HashMap<String, List<String>>();
+
+      HttpURLConnection connection = urlStreamProvider.processURL(url, "GET", null, headers);
+
+      int responseCode = connection.getResponseCode();
+      InputStream resultInputStream = null;
+      if (responseCode >= ProxyService.HTTP_ERROR_RANGE_START) {
+        resultInputStream = connection.getErrorStream();
+      } else {
+        resultInputStream = connection.getInputStream();
+      }
+
+      rd = new BufferedReader(new InputStreamReader(resultInputStream));
+
+      if (rd != null) {
+        line = rd.readLine();
+        while (line != null) {
+          result += line;
+          line = rd.readLine();
+        }
+        rd.close();
+      }
+    } catch (IOException e) {
+      e.printStackTrace();
+    } catch (Exception e) {
+      e.printStackTrace();
+    }
+    return result;
+  }
+}
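
Behaviorally, requestURL is a plain GET that concatenates the response lines, falling back to the error stream for 4xx/5xx status codes. The same idea in Python (a sketch, not the helper itself):

    import urllib2

    def request_url(url, timeout=10):
        # Return the response body; on an HTTP error status, read the
        # error stream instead, mirroring the Java helper above.
        try:
            return urllib2.urlopen(url, timeout=timeout).read()
        except urllib2.HTTPError as e:
            return e.read()
        except Exception:
            return ""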

http://git-wip-us.apache.org/repos/asf/ambari/blob/5b75e7ab/ambari-server/src/main/resources/custom_actions/scripts/ru_execute_tasks.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/custom_actions/scripts/ru_execute_tasks.py b/ambari-server/src/main/resources/custom_actions/scripts/ru_execute_tasks.py
index 269abdb..018dc6f 100644
--- a/ambari-server/src/main/resources/custom_actions/scripts/ru_execute_tasks.py
+++ b/ambari-server/src/main/resources/custom_actions/scripts/ru_execute_tasks.py
@@ -53,10 +53,11 @@ class ExecuteUpgradeTasks(Script):
     # Parse parameters
     config = Script.get_config()
 
-    # TODO HACK, should be retrieved from the command.
+    # TODO Rolling Upgrade, should be retrieved from the command.
     host_name = socket.gethostname()
     version = "2.2.0.0"
 
+    # TODO Rolling Upgrade, does this work on Ubuntu?
     code, out = checked_call("hdp-select")
     if code == 0 and out:
       p = re.compile(r"(2\.2\.0\.0\-\d{4})")
@@ -97,20 +98,20 @@ class ExecuteUpgradeTasks(Script):
           unless = replace_variables(unless, host_name, version)
 
           if first:
-            code, out = call(first)
+            code, out = call(first, verbose=True)
             Logger.info("Pre-condition command. Code: %s, Out: %s" % (str(code), str(out)))
             if code != 0:
               break
 
           if unless:
-            code, out = call(unless)
+            code, out = call(unless, verbose=True)
             Logger.info("Unless command. Code: %s, Out: %s" % (str(code), str(out)))
             if code == 0:
               break
 
           for i in range(1, effective_times+1):
             # TODO, Execute already has a tries and try_sleep, see hdfs_namenode.py for an example
-            code, out = call(command)
+            code, out = call(command, verbose=True)
             Logger.info("Command. Code: %s, Out: %s" % (str(code), str(out)))
 
             if code == 0 or code in ignore_return_codes:
@@ -121,7 +122,7 @@ class ExecuteUpgradeTasks(Script):
               try:
                 if on_failure:
                   on_failure = replace_variables(on_failure, host_name, version)
-                  code_failure_handler, out_failure_handler = call(on_failure)
+                  code_failure_handler, out_failure_handler = call(on_failure, verbose=True)
                   Logger.error("Failure Handler. Code: %s, Out: %s" % (str(code_failure_handler), str(out_failure_handler)))
               except:
                 pass
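
In short, each task's guards behave like this (a plain sketch of the loop above):

    # Per-task control flow in ExecuteUpgradeTasks:
    #   first   -> pre-condition: a non-zero exit code aborts the task
    #   unless  -> guard: a zero exit code means the work is already done
    #   command -> retried up to effective_times; success on code 0 or an
    #              ignored return code; on_failure runs if retries exhaust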

http://git-wip-us.apache.org/repos/asf/ambari/blob/5b75e7ab/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/FLUME/package/scripts/flume_handler.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/FLUME/package/scripts/flume_handler.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/FLUME/package/scripts/flume_handler.py
index ade2cf5..a13f507 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/FLUME/package/scripts/flume_handler.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/FLUME/package/scripts/flume_handler.py
@@ -31,7 +31,7 @@ class FlumeHandler(Script):
     self.install_packages(env)
     env.set_params(params)
 
-  def start(self, env):
+  def start(self, env, rolling_restart=False):
     import params
 
     env.set_params(params)
@@ -39,7 +39,7 @@ class FlumeHandler(Script):
 
     flume(action='start')
 
-  def stop(self, env):
+  def stop(self, env, rolling_restart=False):
     import params
 
     env.set_params(params)

http://git-wip-us.apache.org/repos/asf/ambari/blob/5b75e7ab/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/GANGLIA/package/scripts/ganglia_monitor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/GANGLIA/package/scripts/ganglia_monitor.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/GANGLIA/package/scripts/ganglia_monitor.py
index 158f855..05ceaff 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/GANGLIA/package/scripts/ganglia_monitor.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/GANGLIA/package/scripts/ganglia_monitor.py
@@ -37,13 +37,13 @@ class GangliaMonitor(Script):
     functions.turn_off_autostart(params.gmond_service_name)
     functions.turn_off_autostart("gmetad") # since the package is installed as well
 
-  def start(self, env):
+  def start(self, env, rolling_restart=False):
     import params
     env.set_params(params)
     self.configure(env)
     ganglia_monitor_service.monitor("start")
 
-  def stop(self, env):
+  def stop(self, env, rolling_restart=False):
     ganglia_monitor_service.monitor("stop")
 
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/5b75e7ab/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/GANGLIA/package/scripts/ganglia_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/GANGLIA/package/scripts/ganglia_server.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/GANGLIA/package/scripts/ganglia_server.py
index 93947b1..3492fff 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/GANGLIA/package/scripts/ganglia_server.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/GANGLIA/package/scripts/ganglia_server.py
@@ -37,13 +37,13 @@ class GangliaServer(Script):
     functions.turn_off_autostart(params.gmond_service_name) # since the package is installed as well
     functions.turn_off_autostart("gmetad")
 
-  def start(self, env):
+  def start(self, env, rolling_restart=False):
     import params
     env.set_params(params)
     self.configure(env)
     ganglia_server_service.server("start")
 
-  def stop(self, env):
+  def stop(self, env, rolling_restart=False):
     import params
 
     env.set_params(params)

http://git-wip-us.apache.org/repos/asf/ambari/blob/5b75e7ab/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/scripts/hbase_master.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/scripts/hbase_master.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/scripts/hbase_master.py
index a26254d..6cd77e7 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/scripts/hbase_master.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/scripts/hbase_master.py
@@ -36,7 +36,7 @@ class HbaseMaster(Script):
 
     hbase(name='master')
     
-  def start(self, env):
+  def start(self, env, rolling_restart=False):
     import params
     env.set_params(params)
     self.configure(env) # for security
@@ -45,7 +45,7 @@ class HbaseMaster(Script):
       action = 'start'
     )
     
-  def stop(self, env):
+  def stop(self, env, rolling_restart=False):
     import params
     env.set_params(params)
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/5b75e7ab/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/scripts/hbase_regionserver.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/scripts/hbase_regionserver.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/scripts/hbase_regionserver.py
index 8d66dcc..8ce1e37 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/scripts/hbase_regionserver.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/scripts/hbase_regionserver.py
@@ -35,7 +35,7 @@ class HbaseRegionServer(Script):
 
     hbase(name='regionserver')
       
-  def start(self, env):
+  def start(self, env, rolling_restart=False):
     import params
     env.set_params(params)
     self.configure(env) # for security
@@ -44,7 +44,7 @@ class HbaseRegionServer(Script):
       action = 'start'
     )
     
-  def stop(self, env):
+  def stop(self, env, rolling_restart=False):
     import params
     env.set_params(params)
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/5b75e7ab/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/__init__.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/__init__.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/__init__.py
new file mode 100644
index 0000000..35de4bb
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/__init__.py
@@ -0,0 +1,20 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+Ambari Agent
+
+"""

http://git-wip-us.apache.org/repos/asf/ambari/blob/5b75e7ab/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/datanode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/datanode.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/datanode.py
index 758fa5f..327a457 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/datanode.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/datanode.py
@@ -35,18 +35,17 @@ class DataNode(Script):
     import params
     env.set_params(params)
 
-    version = default("/commandParams/version", None)
-    if version and compare_versions(format_hdp_stack_version(version), '2.2.0.0') >= 0:
+    if params.version and compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0:
       Execute(format("hdp-select set hadoop-hdfs-datanode {version}"))
 
-  def start(self, env):
+  def start(self, env, rolling_restart=False):
     import params
 
     env.set_params(params)
     self.configure(env)
     datanode(action="start")
 
-  def stop(self, env):
+  def stop(self, env, rolling_restart=False):
     import params
 
     env.set_params(params)
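
The version guard above, which recurs for each component below, only runs hdp-select when the target stack is 2.2 or later. Stated on its own (the version string is illustrative):

    from resource_management.libraries.functions.version import (
        compare_versions, format_hdp_stack_version)

    version = "2.2.0.0-2041"  # illustrative /commandParams/version value
    # format_hdp_stack_version normalizes the raw version string so that
    # compare_versions can order it against a plain "2.2.0.0".
    if compare_versions(format_hdp_stack_version(version), '2.2.0.0') >= 0:
        print("hdp-select set hadoop-hdfs-datanode %s" % version)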

http://git-wip-us.apache.org/repos/asf/ambari/blob/5b75e7ab/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs_client.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs_client.py
index 3b4cf3e..b9f244a 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs_client.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs_client.py
@@ -30,12 +30,12 @@ class HdfsClient(Script):
     env.set_params(params)
     self.config(env)
 
-  def start(self, env):
+  def start(self, env, rolling_restart=False):
     import params
 
     env.set_params(params)
 
-  def stop(self, env):
+  def stop(self, env, rolling_restart=False):
     import params
 
     env.set_params(params)

http://git-wip-us.apache.org/repos/asf/ambari/blob/5b75e7ab/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs_namenode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs_namenode.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs_namenode.py
index c773902..5e1116c 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs_namenode.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs_namenode.py
@@ -17,12 +17,13 @@ limitations under the License.
 
 """
 
-from resource_management.core.shell import checked_call
 from resource_management import *
-from utils import service
+from resource_management.core.exceptions import ComponentIsNotRunning
 
+from utils import service, safe_zkfc_op
 
-def namenode(action=None, do_format=True):
+
+def namenode(action=None, do_format=True, rolling_restart=False, env=None):
   import params
   #we need this directory to be present before any action(HA manual steps for
   #additional namenode)
@@ -46,12 +47,21 @@ def namenode(action=None, do_format=True):
               group=params.user_group
     )
 
+    options = "-rollingUpgrade started" if rolling_restart else ""
+
     service(
-      action="start", name="namenode", user=params.hdfs_user,
+      action="start",
+      name="namenode",
+      user=params.hdfs_user,
+      options=options,
       create_pid_dir=True,
       create_log_dir=True
     )
 
+    if rolling_restart:    
+      # Must start Zookeeper Failover Controller if it exists on this host because it could have been killed in order to initiate the failover.
+      safe_zkfc_op(action, env)
+
     if params.security_enabled:
       Execute(format("{kinit_path_local} -kt {hdfs_user_keytab} {hdfs_principal_name}"),
               user = params.hdfs_user)
@@ -66,7 +76,7 @@ def namenode(action=None, do_format=True):
     # If HA is enabled and it is in standby, then stay in safemode, otherwise, leave safemode.
     leave_safe_mode = True
     if dfs_check_nn_status_cmd is not None:
-      code, out = shell.call(dfs_check_nn_status_cmd)
+      code, out = shell.call(dfs_check_nn_status_cmd) # If active NN, code will be 0
       if code != 0:
         leave_safe_mode = False
 
@@ -89,6 +99,7 @@ def namenode(action=None, do_format=True):
             only_if=dfs_check_nn_status_cmd #skip when HA not active
     )
     create_hdfs_directories(dfs_check_nn_status_cmd)
+
   if action == "stop":
     service(
       action="stop", name="namenode", 

http://git-wip-us.apache.org/repos/asf/ambari/blob/5b75e7ab/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/journalnode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/journalnode.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/journalnode.py
index c0b2181..3da5d03 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/journalnode.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/journalnode.py
@@ -18,6 +18,9 @@ limitations under the License.
 """
 
 from resource_management import *
+from resource_management.libraries.functions.version import compare_versions, format_hdp_stack_version
+from resource_management.libraries.functions.format import format
+
 from utils import service
 from hdfs import hdfs
 
@@ -29,7 +32,15 @@ class JournalNode(Script):
     self.install_packages(env, params.exclude_packages)
     env.set_params(params)
 
-  def start(self, env):
+  def pre_rolling_restart(self, env):
+    Logger.info("Executing Rolling Upgrade pre-restart")
+    import params
+    env.set_params(params)
+
+    if params.version and compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0:
+      Execute(format("hdp-select set hadoop-hdfs-journalnode {version}"))
+
+  def start(self, env, rolling_restart=False):
     import params
 
     env.set_params(params)
@@ -45,7 +56,7 @@ class JournalNode(Script):
       create_log_dir=True
     )
 
-  def stop(self, env):
+  def stop(self, env, rolling_restart=False):
     import params
 
     env.set_params(params)

http://git-wip-us.apache.org/repos/asf/ambari/blob/5b75e7ab/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/namenode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/namenode.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/namenode.py
index 68d0bd4..7e4e2e7 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/namenode.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/namenode.py
@@ -17,16 +17,21 @@ limitations under the License.
 
 """
 
+import sys
+import os
+import json
+import subprocess
+from datetime import datetime
+
 from resource_management import *
+from resource_management.libraries.functions.version import compare_versions, format_hdp_stack_version
+from resource_management.libraries.functions.format import format
+from resource_management.libraries.functions.check_process_status import check_process_status
+
 from hdfs_namenode import namenode
 from hdfs import hdfs
-import time
-import json
-import subprocess
 import hdfs_rebalance
-import sys
-import os
-from datetime import datetime
+from utils import failover_namenode
 
 
 class NameNode(Script):
@@ -38,22 +43,21 @@ class NameNode(Script):
     #TODO we need this for HA because of manual steps
     self.configure(env)
 
-  def start(self, env):
-    import params
-
-    env.set_params(params)
-    self.configure(env)
-    namenode(action="start")
-
   def pre_rolling_restart(self, env):
     Logger.info("Executing Rolling Upgrade pre-restart")
     import params
     env.set_params(params)
 
-    version = default("/commandParams/version", None)
-    if version and compare_versions(format_hdp_stack_version(version), '2.2.0.0') >= 0:
+    if params.version and compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0:
       Execute(format("hdp-select set hadoop-hdfs-namenode {version}"))
 
+  def start(self, env, rolling_restart=False):
+    import params
+
+    env.set_params(params)
+    self.configure(env)
+    namenode(action="start", rolling_restart=rolling_restart, env=env)
+
   def post_rolling_restart(self, env):
     Logger.info("Executing Rolling Upgrade post-restart")
     import params
@@ -63,18 +67,24 @@ class NameNode(Script):
             user=params.hdfs_principal_name if params.security_enabled else params.hdfs_user
     )
 
-  def stop(self, env):
+  def stop(self, env, rolling_restart=False):
     import params
-
     env.set_params(params)
-    namenode(action="stop")
+
+    if rolling_restart and params.dfs_ha_enabled:
+      if params.dfs_ha_automatic_failover_enabled:
+        failover_namenode()
+      else:
+        raise Fail("Rolling Upgrade - dfs.ha.automatic-failover.enabled must be enabled to perform a rolling restart")
+
+    namenode(action="stop", rolling_restart=rolling_restart, env=env)
 
   def configure(self, env):
     import params
 
     env.set_params(params)
     hdfs()
-    namenode(action="configure")
+    namenode(action="configure", env=env)
     pass
 
   def status(self, env):

http://git-wip-us.apache.org/repos/asf/ambari/blob/5b75e7ab/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/params.py
index 02e6235..d88e16f 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/params.py
@@ -30,6 +30,10 @@ tmp_dir = Script.get_tmp_dir()
 
 stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
 hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)
+
+# New Cluster Stack Version that is defined during the RESTART of a Rolling Upgrade
+version = default("/commandParams/version", None)
+
 security_enabled = config['configurations']['cluster-env']['security_enabled']
 hdfs_user = status_params.hdfs_user
 hadoop_pid_dir_prefix = status_params.hadoop_pid_dir_prefix
@@ -45,7 +49,7 @@ secure_dn_ports_are_in_use = False
 if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0:
   mapreduce_libs_path = "/usr/hdp/current/hadoop-mapreduce-client/*"
   hadoop_libexec_dir = "/usr/hdp/current/hadoop-client/libexec"
-  hadoop_bin = "/usr/hdp/current/hadoop-client/sbin"
+  hadoop_bin = "/usr/hdp/current/hadoop-client/sbin"    # TODO Rolling Upgrade, switch from hadoop-client to server when starting daemon.
   hadoop_bin_dir = "/usr/hdp/current/hadoop-client/bin"
   hadoop_home = "/usr/hdp/current/hadoop-client"
   if not security_enabled:
@@ -178,6 +182,7 @@ data_dir_mount_file = config['configurations']['hadoop-env']['dfs.datanode.data.
 dfs_ha_enabled = False
 dfs_ha_nameservices = default("/configurations/hdfs-site/dfs.nameservices", None)
 dfs_ha_namenode_ids = default(format("/configurations/hdfs-site/dfs.ha.namenodes.{dfs_ha_nameservices}"), None)
+dfs_ha_automatic_failover_enabled = default("/configurations/hdfs-site/dfs.ha.automatic-failover.enabled", False)
 
 namenode_id = None
 namenode_rpc = None

http://git-wip-us.apache.org/repos/asf/ambari/blob/5b75e7ab/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/snamenode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/snamenode.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/snamenode.py
index 5eb25d2..7106422 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/snamenode.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/snamenode.py
@@ -23,6 +23,7 @@ from hdfs import hdfs
 
 
 class SNameNode(Script):
+
   def install(self, env):
     import params
 
@@ -30,18 +31,20 @@ class SNameNode(Script):
 
     self.install_packages(env, params.exclude_packages)
 
+  def pre_rolling_restart(self, env):
+    # Secondary namenode is actually removed in an HA cluster, which is a pre-requisite for Rolling Upgrade,
+    # so it does not need any Rolling Restart logic.
+    pass
 
-  def start(self, env):
+  def start(self, env, rolling_restart=False):
     import params
-
     env.set_params(params)
 
     self.configure(env)
     snamenode(action="start")
 
-  def stop(self, env):
+  def stop(self, env, rolling_restart=False):
     import params
-
     env.set_params(params)
 
     snamenode(action="stop")

http://git-wip-us.apache.org/repos/asf/ambari/blob/5b75e7ab/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/utils.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/utils.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/utils.py
index 3362f91..0a75145 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/utils.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/utils.py
@@ -17,15 +17,115 @@ limitations under the License.
 
 """
 import os
+import re
 
 from resource_management import *
-import re
+from resource_management.libraries.functions.format import format
+from resource_management.core.shell import call, checked_call
+from resource_management.core.exceptions import ComponentIsNotRunning
+
+from zkfc_slave import ZkfcSlave
+
+def safe_zkfc_op(action, env):
+  """
+  Idempotent operation on the zkfc process to either start or stop it.
+  :param action: start or stop
+  :param env: environment
+  """
+  zkfc = None
+  if action == "start":
+    try:
+      zkfc = ZkfcSlave()
+      zkfc.status(env)
+    except ComponentIsNotRunning:
+      if zkfc:
+        zkfc.start(env)
+
+  if action == "stop":
+    try:
+      zkfc = ZkfcSlave()
+      zkfc.status(env)
+    except ComponentIsNotRunning:
+      pass
+    else:
+      if zkfc:
+        zkfc.stop(env)
 
 
-def service(action=None, name=None, user=None, create_pid_dir=False,
+def failover_namenode():
+  """
+  Failover the primary namenode by killing zkfc if it exists on this host (assuming this host is the primary).
+  """
+  import params
+  check_service_cmd = format("hdfs haadmin -getServiceState {namenode_id}")
+  code, out = call(check_service_cmd, verbose=True, logoutput=True, user=params.hdfs_user)
+
+  state = "unknown"
+  if code == 0 and out:
+    state = "active" if "active" in out else ("standby" if "standby" in out else state)
+    Logger.info("Namenode service state: %s" % state)
+
+  if state == "active":
+    Logger.info("Rolling Upgrade - Initiating namenode failover by killing zkfc on active namenode")
+
+    # Forcefully kill ZKFC on this host to initiate a failover
+    kill_zkfc(params.hdfs_user)
+
+    # Wait until it transitions to standby
+    check_standby_cmd = format("hdfs haadmin -getServiceState {namenode_id} | grep standby")
+    Execute(check_standby_cmd,
+            user=params.hdfs_user,
+            tries=30,
+            try_sleep=6)
+  else:
+    Logger.info("Rolling Upgrade - Host %s is the standby namenode." % str(params.hostname))
+
+
+def kill_zkfc(zkfc_user):
+  """
+  There are two potential methods for failing over the namenode, especially during a Rolling Upgrade.
+  Option 1. Kill zkfc on primary namenode provided that the secondary is up and has zkfc running on it.
+  Option 2. Silent failover (not supported as of HDP 2.2.0.0)
+  :param zkfc_user: User that started the ZKFC process.
+  """
+  import params
+  if params.dfs_ha_enabled:
+    zkfc_pid_file = get_service_pid_file("zkfc", zkfc_user)
+    if zkfc_pid_file:
+      check_process = format("ls {zkfc_pid_file} > /dev/null 2>&1 && ps -p `cat {zkfc_pid_file}` > /dev/null 2>&1")
+      code, out = call(check_process, verbose=True)
+      if code == 0:
+        Logger.debug("ZKFC is running and will be killed to initiate namenode failover.")
+        kill_command = format("{check_process} && kill -9 `cat {zkfc_pid_file}` > /dev/null 2>&1")
+        checked_call(kill_command)
+
+
+def get_service_pid_file(name, user):
+  """
+  Get the pid file path that was used to start the service by the user.
+  :param name: Service name
+  :param user: User that started the service.
+  :return: PID file path
+  """
+  import params
+  pid_dir = format("{hadoop_pid_dir_prefix}/{user}")
+  pid_file = format("{pid_dir}/hadoop-{user}-{name}.pid")
+  return pid_file
+
+
+def service(action=None, name=None, user=None, options="", create_pid_dir=False,
             create_log_dir=False):
+  """
+  :param action: Either "start" or "stop"
+  :param name: Component name, e.g., "namenode", "datanode", "secondarynamenode", "zkfc"
+  :param user: User to run the command as
+  :param options: Additional options to pass to command as a string
+  :param create_pid_dir: Create PID directory
+  :param create_log_dir: Create log file directory
+  """
   import params
 
+  options = options if options else ""
   pid_dir = format("{hadoop_pid_dir_prefix}/{user}")
   pid_file = format("{pid_dir}/hadoop-{user}-{name}.pid")
   log_dir = format("{hdfs_log_dir_prefix}/{user}")
@@ -76,15 +176,18 @@ def service(action=None, name=None, user=None, create_pid_dir=False,
         except ComponentIsNotRunning:
           pass
 
-
   hadoop_daemon = format("{hadoop_bin}/hadoop-daemon.sh")
 
   if user == "root":
-    cmd = [hadoop_daemon, "--config", params.hadoop_conf_dir]
-    daemon_cmd = as_sudo(cmd + [action, name])
+    cmd = [hadoop_daemon, "--config", params.hadoop_conf_dir, action, name]
+    if options:
+      cmd += [options, ]
+    daemon_cmd = as_sudo(cmd)
   else:
-    cmd = format("{hadoop_daemon} --config {hadoop_conf_dir}")
-    daemon_cmd = as_user(format("{ulimit_cmd} {cmd} {action} {name}"), user)
+    cmd = format("{ulimit_cmd} {hadoop_daemon} --config {hadoop_conf_dir} {action} {name}")
+    if options:
+      cmd += " " + options
+    daemon_cmd = as_user(cmd, user)
      
   service_is_up = check_process if action == "start" else None
   #remove pid file from dead process
@@ -102,6 +205,7 @@ def service(action=None, name=None, user=None, create_pid_dir=False,
          action="delete",
     )
 
+
 def get_port(address):
   """
   Extracts port from the address like 0.0.0.0:1019
@@ -114,6 +218,7 @@ def get_port(address):
   else:
     return None
 
+
 def is_secure_port(port):
   """
   Returns True if port is root-owned at *nix systems
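
The Execute(..., tries=30, try_sleep=6) retry in failover_namenode amounts to the following polling loop in plain Python (a sketch; wait_for_standby is a hypothetical name):

    import time
    from resource_management.core.shell import call

    def wait_for_standby(namenode_id, user, tries=30, sleep_secs=6):
        # Poll the HA state until this namenode reports standby.
        for _ in range(tries):
            code, out = call("hdfs haadmin -getServiceState %s" % namenode_id,
                             user=user)
            if code == 0 and out and "standby" in out:
                return True
            time.sleep(sleep_secs)
        return False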

http://git-wip-us.apache.org/repos/asf/ambari/blob/5b75e7ab/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/zkfc_slave.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/zkfc_slave.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/zkfc_slave.py
index c58446e..ee8b418 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/zkfc_slave.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/zkfc_slave.py
@@ -18,7 +18,9 @@ limitations under the License.
 """
 
 from resource_management import *
-from utils import service
+from resource_management.libraries.functions.check_process_status import check_process_status
+
+import utils  # this is needed to avoid a circular dependency since utils.py calls this class
 from hdfs import hdfs
 
 
@@ -29,7 +31,7 @@ class ZkfcSlave(Script):
     self.install_packages(env, params.exclude_packages)
     env.set_params(params)
 
-  def start(self, env):
+  def start(self, env, rolling_restart=False):
     import params
 
     env.set_params(params)
@@ -39,16 +41,16 @@ class ZkfcSlave(Script):
               owner=params.hdfs_user,
               group=params.user_group
     )
-    service(
+    utils.service(
       action="start", name="zkfc", user=params.hdfs_user, create_pid_dir=True,
       create_log_dir=True
     )
 
-  def stop(self, env):
+  def stop(self, env, rolling_restart=False):
     import params
 
     env.set_params(params)
-    service(
+    utils.service(
       action="stop", name="zkfc", user=params.hdfs_user, create_pid_dir=True,
       create_log_dir=True
     )

http://git-wip-us.apache.org/repos/asf/ambari/blob/5b75e7ab/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hive_metastore.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hive_metastore.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hive_metastore.py
index 6ee5507..dc02a7d 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hive_metastore.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hive_metastore.py
@@ -37,7 +37,7 @@ class HiveMetastore(Script):
 
     hive(name='metastore')
 
-  def start(self, env):
+  def start(self, env, rolling_restart=False):
     import params
     env.set_params(params)
     self.configure(env) # FOR SECURITY
@@ -45,7 +45,7 @@ class HiveMetastore(Script):
                    action = 'start'
     )
 
-  def stop(self, env):
+  def stop(self, env, rolling_restart=False):
     import params
     env.set_params(params)
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/5b75e7ab/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hive_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hive_server.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hive_server.py
index 2c48e4c..fa8ece4 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hive_server.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hive_server.py
@@ -37,7 +37,7 @@ class HiveServer(Script):
       install_tez_jars()
     hive(name='hiveserver2')
 
-  def start(self, env):
+  def start(self, env, rolling_restart=False):
     import params
     env.set_params(params)
     self.configure(env) # FOR SECURITY
@@ -50,7 +50,7 @@ class HiveServer(Script):
                   action = 'start'
     )
 
-  def stop(self, env):
+  def stop(self, env, rolling_restart=False):
     import params
     env.set_params(params)
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/5b75e7ab/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/mysql_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/mysql_server.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/mysql_server.py
index 885cf06..91a699b 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/mysql_server.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/mysql_server.py
@@ -40,12 +40,12 @@ class MysqlServer(Script):
     env.set_params(params)
     mysql_users.mysql_adduser(params)
 
-  def start(self, env):
+  def start(self, env, rolling_restart=False):
     import params
     env.set_params(params)
     mysql_service(daemon_name=params.daemon_name, action='start')
 
-  def stop(self, env):
+  def stop(self, env, rolling_restart=False):
     import params
     env.set_params(params)
     mysql_service(daemon_name=params.daemon_name, action='stop')

http://git-wip-us.apache.org/repos/asf/ambari/blob/5b75e7ab/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/webhcat_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/webhcat_server.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/webhcat_server.py
index 088cb41..a8b3a8f 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/webhcat_server.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/webhcat_server.py
@@ -32,13 +32,13 @@ class WebHCatServer(Script):
     env.set_params(params)
     webhcat()
 
-  def start(self, env):
+  def start(self, env, rolling_restart=False):
     import params
     env.set_params(params)
     self.configure(env) # FOR SECURITY
     webhcat_service(action = 'start')
 
-  def stop(self, env):
+  def stop(self, env, rolling_restart=False):
     import params
     env.set_params(params)
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/5b75e7ab/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/package/scripts/oozie_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/package/scripts/oozie_server.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/package/scripts/oozie_server.py
index 70414fc..f07e36d 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/package/scripts/oozie_server.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/package/scripts/oozie_server.py
@@ -35,14 +35,14 @@ class OozieServer(Script):
 
     oozie(is_server=True)
     
-  def start(self, env):
+  def start(self, env, rolling_restart=False):
     import params
     env.set_params(params)
     #TODO remove this when config command will be implemented
     self.configure(env)
     oozie_service(action='start')
     
-  def stop(self, env):
+  def stop(self, env, rolling_restart=False):
     import params
     env.set_params(params)
     oozie_service(action='stop')

http://git-wip-us.apache.org/repos/asf/ambari/blob/5b75e7ab/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/application_timeline_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/application_timeline_server.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/application_timeline_server.py
index 75a4710..26a6046 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/application_timeline_server.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/application_timeline_server.py
@@ -19,8 +19,10 @@ Ambari Agent
 
 """
 
-import sys
 from resource_management import *
+from resource_management.libraries.functions.version import compare_versions, format_hdp_stack_version
+from resource_management.libraries.functions.format import format
+
 from yarn import yarn
 from service import service
 
@@ -35,13 +37,21 @@ class ApplicationTimelineServer(Script):
     env.set_params(params)
     yarn(name='apptimelineserver')
 
-  def start(self, env):
+  def pre_rolling_restart(self, env):
+    Logger.info("Executing Rolling Upgrade pre-restart")
+    import params
+    env.set_params(params)
+
+    if params.version and compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0:
+      Execute(format("hdp-select set hadoop-yarn-timelineserver {version}"))
+
+  def start(self, env, rolling_restart=False):
     import params
     env.set_params(params)
     self.configure(env) # FOR SECURITY
     service('timelineserver', action='start')
 
-  def stop(self, env):
+  def stop(self, env, rolling_restart=False):
     import params
     env.set_params(params)
     service('timelineserver', action='stop')

http://git-wip-us.apache.org/repos/asf/ambari/blob/5b75e7ab/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/historyserver.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/historyserver.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/historyserver.py
index 2485d05..d2b6ee3 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/historyserver.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/historyserver.py
@@ -18,9 +18,11 @@ limitations under the License.
 Ambari Agent
 
 """
-import sys
+
 from resource_management import *
 from resource_management.libraries.functions.dynamic_variable_interpretation import copy_tarballs_to_hdfs
+from resource_management.libraries.functions.version import compare_versions, format_hdp_stack_version
+from resource_management.libraries.functions.format import format
 
 from yarn import yarn
 from service import service
@@ -34,14 +36,23 @@ class HistoryServer(Script):
     env.set_params(params)
     yarn(name="historyserver")
 
-  def start(self, env):
+  def pre_rolling_restart(self, env):
+    Logger.info("Executing Rolling Upgrade pre-restart")
+    import params
+    env.set_params(params)
+
+    if params.version and compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0:
+      Execute(format("hdp-select set hadoop-mapreduce-historyserver {version}"))
+      copy_tarballs_to_hdfs('mapreduce', params.mapred_user, params.hdfs_user, params.user_group)
+
+  def start(self, env, rolling_restart=False):
     import params
     env.set_params(params)
     self.configure(env) # FOR SECURITY
     copy_tarballs_to_hdfs('mapreduce', params.mapred_user, params.hdfs_user, params.user_group)
     service('historyserver', action='start', serviceName='mapreduce')
 
-  def stop(self, env):
+  def stop(self, env, rolling_restart=False):
     import params
     env.set_params(params)
     service('historyserver', action='stop', serviceName='mapreduce')

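Re-running copy_tarballs_to_hdfs in pre_rolling_restart matters because the tarball destination on HDFS is versioned: after hdp-select switches the local bits, MapReduce jobs need a matching framework tarball for the new version. A hedged sketch of the path computation (the /hdp/apps layout is the HDP 2.2 convention; the helper's actual logic is in dynamic_variable_interpretation.py):

    # Illustrative only: versioned HDFS destination for the mapreduce tarball.
    def tarball_hdfs_dest(component, hdp_version):
      return "/hdp/apps/{0}/{1}/{1}.tar.gz".format(hdp_version, component)

    print(tarball_hdfs_dest("mapreduce", "2.2.0.0-2041"))
    # -> /hdp/apps/2.2.0.0-2041/mapreduce/mapreduce.tar.gz
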
http://git-wip-us.apache.org/repos/asf/ambari/blob/5b75e7ab/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/nodemanager.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/nodemanager.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/nodemanager.py
index 8e153e0..0fcf20d 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/nodemanager.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/nodemanager.py
@@ -18,13 +18,32 @@ limitations under the License.
 Ambari Agent
 
 """
+import re
 
-import sys
 from resource_management import *
+from resource_management.libraries.functions.decorator import retry
+from resource_management.libraries.functions.version import compare_versions, format_hdp_stack_version
+from resource_management.libraries.functions.format import format
+from resource_management.core.shell import call
 
 from yarn import yarn
 from service import service
 
+
+@retry(times=10, sleep_time=2, err_class=Fail)
+def call_and_match_output(command, regex_expression, err_message):
+  """
+  Calls the command and performs a regex match on the output for the specified expression.
+  :param command: Command to call
+  :param regex_expression: Regex expression to search in the output
+  """
+  # TODO Rolling Upgrade: does this work on Ubuntu? If it doesn't, see dynamic_variable_interpretation.py
+  # for how stdout was redirected to a temporary file and then read back.
+  code, out = call(command, verbose=True)
+  if not (out and re.search(regex_expression, out, re.IGNORECASE)):
+    raise Fail(err_message)
+
+
 class Nodemanager(Script):
   def install(self, env):
     self.install_packages(env)
@@ -34,7 +53,15 @@ class Nodemanager(Script):
     env.set_params(params)
     yarn(name="nodemanager")
 
-  def start(self, env):
+  def pre_rolling_restart(self, env):
+    Logger.info("Executing Rolling Upgrade post-restart")
+    import params
+    env.set_params(params)
+
+    if params.version and compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0:
+      Execute(format("hdp-select set hadoop-yarn-nodemanager {version}"))
+
+  def start(self, env, rolling_restart=False):
     import params
     env.set_params(params)
     self.configure(env) # FOR SECURITY
@@ -42,7 +69,15 @@ class Nodemanager(Script):
             action='start'
     )
 
-  def stop(self, env):
+  def post_rolling_restart(self, env):
+    Logger.info("Executing Rolling Upgrade post-restart")
+    import params
+    env.set_params(params)
+
+    nm_status_command = format("yarn node -status {nm_address}")
+    call_and_match_output(nm_status_command, 'Node-State : RUNNING', "NodeManager did not report Node-State : RUNNING")
+
+  def stop(self, env, rolling_restart=False):
     import params
     env.set_params(params)
 

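call_and_match_output leans on the @retry decorator to ride out the window where a restarted NodeManager has not yet re-registered with the ResourceManager: the status check is retried up to 10 times, sleeping 2 seconds between attempts. A simplified re-implementation of the decorator's contract (the real one is resource_management.libraries.functions.decorator):

    import time
    from functools import wraps

    class Fail(Exception):
      pass

    def retry(times=3, sleep_time=1, err_class=Exception):
      # Simplified sketch: retry on err_class, last attempt propagates the failure.
      def decorator(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
          attempts = times
          while attempts > 1:
            try:
              return func(*args, **kwargs)
            except err_class:
              attempts -= 1
              time.sleep(sleep_time)
          return func(*args, **kwargs)
        return wrapper
      return decorator
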
http://git-wip-us.apache.org/repos/asf/ambari/blob/5b75e7ab/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/params.py
index 3557d9d..c1bd382 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/params.py
@@ -20,6 +20,7 @@ Ambari Agent
 """
 import os
 from resource_management.libraries.functions.version import format_hdp_stack_version, compare_versions
+from resource_management.libraries.functions.default import default
 from resource_management import *
 import status_params
 
@@ -31,6 +32,11 @@ tmp_dir = Script.get_tmp_dir()
 hdp_stack_version = str(config['hostLevelParams']['stack_version'])
 hdp_stack_version = format_hdp_stack_version(hdp_stack_version)
 
+# New Cluster Stack Version that is defined during the RESTART of a Rolling Upgrade
+version = default("/commandParams/version", None)
+
+hostname = config['hostname']
+
 #hadoop params
 if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0:
   hadoop_libexec_dir = "/usr/hdp/current/hadoop-client/libexec"
@@ -98,6 +104,9 @@ else:
 
 nm_webui_address = config['configurations']['yarn-site']['yarn.nodemanager.webapp.address']
 hs_webui_address = config['configurations']['mapred-site']['mapreduce.jobhistory.webapp.address']
+nm_address = config['configurations']['yarn-site']['yarn.nodemanager.address']  # may still contain the 0.0.0.0 wildcard bind address
+if hostname and nm_address and nm_address.startswith("0.0.0.0:"):
+  nm_address = nm_address.replace("0.0.0.0", hostname)
 
 nm_local_dirs = config['configurations']['yarn-site']['yarn.nodemanager.local-dirs']
 nm_log_dirs = config['configurations']['yarn-site']['yarn.nodemanager.log-dirs']
@@ -121,8 +130,6 @@ user_group = config['configurations']['cluster-env']['user_group']
 exclude_hosts = default("/clusterHostInfo/decom_nm_hosts", [])
 exclude_file_path = default("/configurations/yarn-site/yarn.resourcemanager.nodes.exclude-path","/etc/hadoop/conf/yarn.exclude")
 
-hostname = config['hostname']
-
 ats_host = set(default("/clusterHostInfo/app_timeline_server_hosts", []))
 has_ats = not len(ats_host) == 0
 

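The nm_address handling exists because yarn.nodemanager.address is usually left at the 0.0.0.0 wildcard bind address, which `yarn node -status` cannot use as a node id; substituting the agent's own hostname produces an addressable id. The substitution in isolation, with illustrative values:

    # Same logic as params.py above, runnable standalone.
    hostname = "c6401.ambari.apache.org"      # illustrative agent hostname
    nm_address = "0.0.0.0:45454"              # typical yarn.nodemanager.address
    if hostname and nm_address and nm_address.startswith("0.0.0.0:"):
      nm_address = nm_address.replace("0.0.0.0", hostname)
    print(nm_address)  # -> c6401.ambari.apache.org:45454
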
http://git-wip-us.apache.org/repos/asf/ambari/blob/5b75e7ab/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/resourcemanager.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/resourcemanager.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/resourcemanager.py
index b87232f..5904d57 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/resourcemanager.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/resourcemanager.py
@@ -19,8 +19,8 @@ Ambari Agent
 
 """
 
-import sys
 from resource_management import *
+from resource_management.libraries.functions.version import compare_versions, format_hdp_stack_version
 
 from yarn import yarn
 from service import service
@@ -36,7 +36,15 @@ class Resourcemanager(Script):
     env.set_params(params)
     yarn(name='resourcemanager')
 
-  def start(self, env):
+  def pre_rolling_restart(self, env):
+    Logger.info("Executing Rolling Upgrade post-restart")
+    import params
+    env.set_params(params)
+
+    if params.version and compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0:
+      Execute(format("hdp-select set hadoop-yarn-resourcemanager {version}"))
+
+  def start(self, env, rolling_restart=False):
     import params
 
     env.set_params(params)
@@ -45,7 +53,7 @@ class Resourcemanager(Script):
             action='start'
     )
 
-  def stop(self, env):
+  def stop(self, env, rolling_restart=False):
     import params
 
     env.set_params(params)

http://git-wip-us.apache.org/repos/asf/ambari/blob/5b75e7ab/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/ZOOKEEPER/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/ZOOKEEPER/package/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/ZOOKEEPER/package/scripts/params.py
index 533e9b3..b52d6e3 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/ZOOKEEPER/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/ZOOKEEPER/package/scripts/params.py
@@ -20,6 +20,7 @@ Ambari Agent
 """
 
 from resource_management.libraries.functions.version import format_hdp_stack_version, compare_versions
+from resource_management.libraries.functions.default import default
 from resource_management import *
 import status_params
 
@@ -30,10 +31,13 @@ tmp_dir = Script.get_tmp_dir()
 hdp_stack_version = str(config['hostLevelParams']['stack_version'])
 hdp_stack_version = format_hdp_stack_version(hdp_stack_version)
 
+# New Cluster Stack Version that is defined during the RESTART of a Rolling Upgrade
+version = default("/commandParams/version", None)
+
 #hadoop params
 if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0:
   zk_home = '/usr/hdp/current/zookeeper-client'
-  zk_bin = '/usr/hdp/current/zookeeper-client/bin'
+  zk_bin = '/usr/hdp/current/zookeeper-client/bin'    # TODO Rolling Upgrade: should point to the server binary dir when starting the server daemon
   zk_cli_shell = '/usr/hdp/current/zookeeper-client/bin/zkCli.sh'
 else:
   zk_home = '/usr'

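default() makes the new version parameter safe to read on ordinary restarts: outside a rolling upgrade the command carries no /commandParams/version key, and the helper falls back to None instead of raising a KeyError. A simplified stand-in showing that contract (the real helper is resource_management.libraries.functions.default):

    # Sketch of the lookup contract, not the Ambari implementation.
    def default(path, fallback, command_json=None):
      node = command_json or {}
      for key in path.strip('/').split('/'):
        if not isinstance(node, dict) or key not in node:
          return fallback
        node = node[key]
      return node

    cmd = {"commandParams": {}}  # no 'version' outside a rolling upgrade
    print(default("/commandParams/version", None, cmd))  # -> None
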
http://git-wip-us.apache.org/repos/asf/ambari/blob/5b75e7ab/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/ZOOKEEPER/package/scripts/zookeeper_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/ZOOKEEPER/package/scripts/zookeeper_server.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/ZOOKEEPER/package/scripts/zookeeper_server.py
index a1a25b8..cc828a4 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/ZOOKEEPER/package/scripts/zookeeper_server.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/ZOOKEEPER/package/scripts/zookeeper_server.py
@@ -19,19 +19,19 @@ Ambari Agent
 
 """
 
-import sys
 import re
+
 from resource_management import *
-from resource_management.libraries.functions.format import format
 from resource_management.libraries.functions import get_unique_id_and_date
 from resource_management.libraries.functions.decorator import retry
 from resource_management.libraries.functions.version import compare_versions, format_hdp_stack_version
+from resource_management.libraries.functions.format import format
 from resource_management.core.shell import call
 
-
 from zookeeper import zookeeper
 from zookeeper_service import zookeeper_service
 
+
 @retry(times=10, sleep_time=2, err_class=Fail)
 def call_and_match_output(command, regex_expression, err_message):
   """
@@ -39,8 +39,10 @@ def call_and_match_output(command, regex_expression, err_message):
   :param command: Command to call
   :param regex_expression: Regex expression to search in the output
   """
+  # TODO Rolling Upgrade: does this work on Ubuntu? If it doesn't, see dynamic_variable_interpretation.py
+  # for how stdout was redirected to a temporary file and then read back.
   code, out = call(command, verbose=True)
-  if not (out and re.search(regex_expression, out)):
+  if not (out and re.search(regex_expression, out, re.IGNORECASE)):
     raise Fail(err_message)
 
 
@@ -59,11 +61,10 @@ class ZookeeperServer(Script):
     import params
     env.set_params(params)
 
-    version = default("/commandParams/version", None)
-    if version and compare_versions(format_hdp_stack_version(version), '2.2.0.0') >= 0:
+    if params.version and compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0:
       Execute(format("hdp-select set zookeeper-server {version}"))
 
-  def start(self, env):
+  def start(self, env, rolling_restart=False):
     import params
     env.set_params(params)
     self.configure(env)
@@ -83,9 +84,9 @@ class ZookeeperServer(Script):
     quorum_err_message = "Failed to establish zookeeper quorum"
     call_and_match_output(create_command, 'Created', quorum_err_message)
     call_and_match_output(list_command, r"\[.*?" + unique + ".*?\]", quorum_err_message)
-    call(delete_command)
+    call(delete_command, verbose=True)
 
-  def stop(self, env):
+  def stop(self, env, rolling_restart=False):
     import params
     env.set_params(params)
     zookeeper_service(action = 'stop')

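The post-restart quorum check writes a uniquely named znode through zkCli.sh, lists it back, and now matches the output case-insensitively so locale or formatting differences in the CLI output do not produce false negatives. The shape of that round trip, with illustrative commands and a pretend CLI output:

    import re

    unique = "id_20141211"  # in the real script, from get_unique_id_and_date()
    create_command = "zkCli.sh create /zk_test_%s mydata" % unique    # illustrative
    list_command = "zkCli.sh ls /"
    delete_command = "zkCli.sh delete /zk_test_%s" % unique

    out = "[zookeeper, zk_test_%s]" % unique  # pretend `ls /` output
    assert re.search(r"\[.*?" + unique + r".*?\]", out, re.IGNORECASE)
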
http://git-wip-us.apache.org/repos/asf/ambari/blob/5b75e7ab/ambari-server/src/main/resources/stacks/HDP/2.2/upgrades/upgrade-2.2.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/upgrades/upgrade-2.2.xml b/ambari-server/src/main/resources/stacks/HDP/2.2/upgrades/upgrade-2.2.xml
index 619fc3a..060470f 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2/upgrades/upgrade-2.2.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/upgrades/upgrade-2.2.xml
@@ -32,36 +32,46 @@
 <upgrade xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
   <target>2.2.*.*</target>
   <order>
-  
+
     <group name="ZOOKEEPER" title="Zookeeper">
       <service name="ZOOKEEPER">
         <component>ZOOKEEPER_SERVER</component>
         <component>ZOOKEEPER_CLIENT</component>
       </service>
     </group>
-    
+
     <group name="CORE_MASTER" title="Core Masters">
       <service name="HDFS">
         <component>JOURNALNODE</component>
         <component>NAMENODE</component>
       </service>
+
+      <service name="MAPREDUCE2">
+        <component>HISTORYSERVER</component>
+      </service>
+
       <service name="YARN">
+        <component>APP_TIMELINE_SERVER</component>
         <component>RESOURCEMANAGER</component>
+        <component>NODEMANAGER</component>
       </service>
     </group>
-    
+
     <group name="CORE_SLAVES" title="Core Slaves" xsi:type="colocated">
       <service name="HDFS">
         <component>DATANODE</component>
       </service>
+
       <service name="HBASE">
         <component>REGIONSERVER</component>
       </service>
+
       <service name="YARN">
         <component>NODEMANAGER</component>
       </service>
+
       <batch>
-        <percent>20</percent>
+        <percent>33</percent>
         <message>Please run additional tests</message>
       </batch>
     </group>
@@ -113,6 +123,7 @@
         </upgrade>
       </component>
     </service>
+
     <service name="HDFS">
       <component name="NAMENODE">
         
@@ -224,11 +235,45 @@
         <upgrade>
           <task xsi:type="restart" />
         </upgrade>
-
       </component>
 
       <component name="JOURNALNODE">
-        <!-- Recommended after the Namenode, and only needed when HA is enabled. -->
+        <upgrade>
+          <task xsi:type="restart" />
+        </upgrade>
+      </component>
+
+    </service>
+
+    <service name="MAPREDUCE2">
+      <component name="HISTORYSERVER">
+        <upgrade>
+          <task xsi:type="restart" />
+        </upgrade>
+      </component>
+    </service>
+
+    <service name="YARN">
+      <component name="APP_TIMELINE_SERVER">
+        <upgrade>
+          <task xsi:type="restart" />
+        </upgrade>
+      </component>
+
+      <component name="RESOURCEMANAGER">
+        <!--
+        Upgrade and restart the standby ResourceManager first,
+        then promote the standby ResourceManager to active.
+
+        Automatic failover: this can be done by simply killing the current active ResourceManager.
+        Manual failover: promote the standby ResourceManager to active and demote the current active to standby.
+        -->
+        <upgrade>
+          <task xsi:type="restart" />
+        </upgrade>
+      </component>
+
+      <component name="NODEMANAGER">
         <upgrade>
           <task xsi:type="restart" />
         </upgrade>

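Raising the batch percentage from 20 to 33 restarts the colocated slaves in roughly three waves instead of five, pausing with the "Please run additional tests" prompt between waves. A quick sketch of how a percentage could translate into batch sizes (the actual grouping logic is in Grouping.java and UpgradeHelper.java; this arithmetic is only illustrative):

    import math

    def batch_sizes(host_count, percent):
      # Illustrative: ceil(percent% of hosts) per batch until hosts run out.
      per_batch = max(1, int(math.ceil(host_count * percent / 100.0)))
      sizes = []
      remaining = host_count
      while remaining > 0:
        sizes.append(min(per_batch, remaining))
        remaining -= sizes[-1]
      return sizes

    print(batch_sizes(10, 33))  # -> [4, 4, 2]
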
http://git-wip-us.apache.org/repos/asf/ambari/blob/5b75e7ab/ambari-server/src/test/java/org/apache/ambari/server/state/UpgradeHelperTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/UpgradeHelperTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/UpgradeHelperTest.java
index 88b6125..f40c638 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/UpgradeHelperTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/UpgradeHelperTest.java
@@ -86,9 +86,8 @@ public class UpgradeHelperTest {
     assertEquals("POST_CLUSTER", groups.get(4).name);
 
     assertEquals(6, groups.get(1).items.size());
-    assertEquals(5, groups.get(2).items.size());
+    assertEquals(2, groups.get(2).items.size());
     assertEquals(6, groups.get(3).items.size());
-
   }
 
   public Cluster makeCluster() throws AmbariException {