You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@ambari.apache.org by al...@apache.org on 2016/06/21 23:22:15 UTC

ambari git commit: AMBARI-17280. RU to write out client configs that are dependencies of Hive, ATS, and Oozie during upgrades that change configs (alejandro)

Repository: ambari
Updated Branches:
  refs/heads/trunk 706946e84 -> 58198f533


AMBARI-17280. RU to write out client configs that are dependencies of Hive, ATS, and Oozie during upgrades that change configs (alejandro)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/58198f53
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/58198f53
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/58198f53

Branch: refs/heads/trunk
Commit: 58198f5339b221e6b3c6d8fb2d61523f2ac0e3bf
Parents: 706946e
Author: Alejandro Fernandez <af...@hortonworks.com>
Authored: Tue Jun 21 16:23:38 2016 -0700
Committer: Alejandro Fernandez <af...@hortonworks.com>
Committed: Tue Jun 21 16:23:38 2016 -0700

----------------------------------------------------------------------
 .../libraries/functions/conf_select.py          |  6 ++-
 .../libraries/functions/stack_select.py         |  3 ++
 .../libraries/script/script.py                  | 27 ++++++++++-
 .../internal/UpgradeResourceProvider.java       |  5 +-
 .../SPARK/1.2.1/package/scripts/setup_spark.py  | 25 +++++++---
 .../SPARK/1.2.1/package/scripts/spark_client.py | 37 +++++++++++++--
 .../0.4.0.2.1/package/scripts/params_linux.py   |  3 --
 .../TEZ/0.4.0.2.1/package/scripts/tez.py        | 24 +++++++---
 .../TEZ/0.4.0.2.1/package/scripts/tez_client.py | 39 +++++++++++++--
 .../package/scripts/mapreduce2_client.py        | 37 +++++++++++++--
 .../2.1.0.2.0/package/scripts/params_linux.py   |  3 ++
 .../YARN/2.1.0.2.0/package/scripts/yarn.py      | 50 ++++++++++++--------
 .../stacks/HDP/2.3/upgrades/upgrade-2.4.xml     | 43 +++++++++++++++--
 .../stacks/HDP/2.3/upgrades/upgrade-2.5.xml     | 43 +++++++++++++++--
 .../stacks/HDP/2.4/upgrades/upgrade-2.5.xml     | 43 +++++++++++++++--
 .../python/stacks/2.1/TEZ/test_tez_client.py    |  1 +
 16 files changed, 321 insertions(+), 68 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/58198f53/ambari-common/src/main/python/resource_management/libraries/functions/conf_select.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/conf_select.py b/ambari-common/src/main/python/resource_management/libraries/functions/conf_select.py
index e36a00d..c810438 100644
--- a/ambari-common/src/main/python/resource_management/libraries/functions/conf_select.py
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/conf_select.py
@@ -20,12 +20,14 @@ limitations under the License.
 
 __all__ = ["select", "create", "get_hadoop_conf_dir", "get_hadoop_dir", "get_package_dirs"]
 
+# Python Imports
 import copy
 import os
-import version
-import stack_select
 import subprocess
 
+# Local Imports
+import version
+import stack_select
 from resource_management.core import shell
 from resource_management.libraries.functions.format import format
 from resource_management.libraries.script.script import Script

http://git-wip-us.apache.org/repos/asf/ambari/blob/58198f53/ambari-common/src/main/python/resource_management/libraries/functions/stack_select.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/stack_select.py b/ambari-common/src/main/python/resource_management/libraries/functions/stack_select.py
index b994fce..513ceac 100644
--- a/ambari-common/src/main/python/resource_management/libraries/functions/stack_select.py
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/stack_select.py
@@ -18,9 +18,12 @@ limitations under the License.
 
 """
 
+# Python Imports
 import os
 import sys
 import re
+
+# Local Imports
 from resource_management.core.logger import Logger
 from resource_management.core.exceptions import Fail
 from resource_management.core.resources.system import Execute

http://git-wip-us.apache.org/repos/asf/ambari/blob/58198f53/ambari-common/src/main/python/resource_management/libraries/script/script.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/script/script.py b/ambari-common/src/main/python/resource_management/libraries/script/script.py
index 49dcb4e..77228a9 100644
--- a/ambari-common/src/main/python/resource_management/libraries/script/script.py
+++ b/ambari-common/src/main/python/resource_management/libraries/script/script.py
@@ -155,6 +155,29 @@ class Script(object):
     """
     pass
 
+  def get_config_dir_during_stack_upgrade(self, env, base_dir, conf_select_name):
+    """
+    Because this gets called during a Rolling Upgrade, the new configs have already been saved, so we must be
+    careful to only call configure() on the directory with the new version.
+
+    If valid, returns the config directory to save configs to, otherwise, return None
+    """
+    import params
+    env.set_params(params)
+
+    required_attributes = ["stack_name", "stack_root", "version"]
+    for attribute in required_attributes:
+      if not hasattr(params, attribute):
+        raise Fail("Failed in function 'get_config_dir_during_stack_upgrade' because params was missing variable %s." % attribute)
+
+    Logger.info("stack_upgrade_save_new_config(): Checking if can write new client configs to new config version folder.")
+
+    if check_stack_feature(StackFeature.CONFIG_VERSIONING, params.version):
+      # Even though hdp-select has not yet been called, write new configs to the new config directory.
+      config_path = os.path.join(params.stack_root, params.version, conf_select_name, "conf")
+      return os.path.realpath(config_path)
+    return None
+
   def save_component_version_to_structured_out(self):
     """
     :param stack_name: One of HDP, HDPWIN, PHD, BIGTOP.
@@ -709,9 +732,11 @@ class Script(object):
     """
     pass
 
-  def configure(self, env, upgrade_type=None):
+  def configure(self, env, upgrade_type=None, config_dir=None):
     """
     To be overridden by subclasses
+    :param upgrade_type: only valid during RU/EU, otherwise will be None
+    :param config_dir: for some clients during RU, the location to save configs to, otherwise None
     """
     self.fail_with_error('configure method isn\'t implemented')
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/58198f53/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UpgradeResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UpgradeResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UpgradeResourceProvider.java
index fb3ae69..ac0c78c 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UpgradeResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UpgradeResourceProvider.java
@@ -1009,6 +1009,7 @@ public class UpgradeResourceProvider extends AbstractControllerResourceProvider
     StackId currentStackId = cluster.getCurrentStackVersion();
     StackId desiredStackId = cluster.getDesiredStackVersion();
     StackId targetStackId = new StackId(targetStack);
+    // Only change configs if moving to a different stack.
     switch (direction) {
       case UPGRADE:
         if (currentStackId.equals(targetStackId)) {
@@ -1078,7 +1079,7 @@ public class UpgradeResourceProvider extends AbstractControllerResourceProvider
       while (iterator.hasNext()) {
         String configType = iterator.next();
         if (skipConfigTypes.contains(configType)) {
-          LOG.info("RU: Removing configs for config-type {}", configType);
+          LOG.info("Stack Upgrade: Removing configs for config-type {}", configType);
           iterator.remove();
         }
       }
@@ -1089,7 +1090,7 @@ public class UpgradeResourceProvider extends AbstractControllerResourceProvider
       for (Map.Entry<String, DesiredConfig> existingEntry : existingDesiredConfigurationsByType.entrySet()) {
         String configurationType = existingEntry.getKey();
         if(skipConfigTypes.contains(configurationType)) {
-          LOG.info("RU: Skipping config-type {} as upgrade-pack contains no updates to its service", configurationType);
+          LOG.info("Stack Upgrade: Skipping config-type {} as upgrade-pack contains no updates to its service", configurationType);
           continue;
         }
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/58198f53/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/setup_spark.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/setup_spark.py b/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/setup_spark.py
index 63c72f7..275ab23 100644
--- a/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/setup_spark.py
+++ b/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/setup_spark.py
@@ -34,9 +34,20 @@ from resource_management.libraries.functions.constants import StackFeature
 from resource_management.libraries.functions.format import format
 from resource_management.libraries.resources.xml_config import XmlConfig
 
-def setup_spark(env, type, upgrade_type = None, action = None):
+def setup_spark(env, type, upgrade_type=None, action=None, config_dir=None):
+  """
+  :param env: Python environment
+  :param type: Spark component type
+  :param upgrade_type: If in a stack upgrade, either UPGRADE_TYPE_ROLLING or UPGRADE_TYPE_NON_ROLLING
+  :param action: Action to perform, such as generate configs
+  :param config_dir: Optional config directory to write configs to.
+  """
+
   import params
 
+  if config_dir is None:
+    config_dir = params.spark_conf
+
   Directory([params.spark_pid_dir, params.spark_log_dir],
             owner=params.spark_user,
             group=params.user_group,
@@ -52,7 +63,7 @@ def setup_spark(env, type, upgrade_type = None, action = None):
     )
     params.HdfsResource(None, action="execute")
 
-  PropertiesFile(format("{spark_conf}/spark-defaults.conf"),
+  PropertiesFile(os.path.join(config_dir, "spark-defaults.conf"),
     properties = params.config['configurations']['spark-defaults'],
     key_value_delimiter = " ",
     owner=params.spark_user,
@@ -60,7 +71,7 @@ def setup_spark(env, type, upgrade_type = None, action = None):
   )
 
   # create spark-env.sh in etc/conf dir
-  File(os.path.join(params.spark_conf, 'spark-env.sh'),
+  File(os.path.join(config_dir, 'spark-env.sh'),
        owner=params.spark_user,
        group=params.spark_group,
        content=InlineTemplate(params.spark_env_sh),
@@ -68,7 +79,7 @@ def setup_spark(env, type, upgrade_type = None, action = None):
   )
 
   #create log4j.properties in etc/conf dir
-  File(os.path.join(params.spark_conf, 'log4j.properties'),
+  File(os.path.join(config_dir, 'log4j.properties'),
        owner=params.spark_user,
        group=params.spark_group,
        content=params.spark_log4j_properties,
@@ -76,7 +87,7 @@ def setup_spark(env, type, upgrade_type = None, action = None):
   )
 
   #create metrics.properties in etc/conf dir
-  File(os.path.join(params.spark_conf, 'metrics.properties'),
+  File(os.path.join(config_dir, 'metrics.properties'),
        owner=params.spark_user,
        group=params.spark_group,
        content=InlineTemplate(params.spark_metrics_properties)
@@ -96,7 +107,7 @@ def setup_spark(env, type, upgrade_type = None, action = None):
 
   if params.is_hive_installed:
     XmlConfig("hive-site.xml",
-          conf_dir=params.spark_conf,
+          conf_dir=config_dir,
           configurations=params.spark_hive_properties,
           owner=params.spark_user,
           group=params.spark_group,
@@ -116,7 +127,7 @@ def setup_spark(env, type, upgrade_type = None, action = None):
 
   if params.spark_thrift_fairscheduler_content and effective_version and check_stack_feature(StackFeature.SPARK_16PLUS, effective_version):
     # create spark-thrift-fairscheduler.xml
-    File(os.path.join(params.spark_conf,"spark-thrift-fairscheduler.xml"),
+    File(os.path.join(config_dir,"spark-thrift-fairscheduler.xml"),
       owner=params.spark_user,
       group=params.spark_group,
       mode=0755,

http://git-wip-us.apache.org/repos/asf/ambari/blob/58198f53/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/spark_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/spark_client.py b/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/spark_client.py
index ef41453..31bf4c6 100644
--- a/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/spark_client.py
+++ b/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/spark_client.py
@@ -17,15 +17,18 @@ See the License for the specific language governing permissions and
 limitations under the License.
 
 """
-
+# Python imports
+import os
 import sys
+
+# Local imports
 from resource_management.libraries.script.script import Script
 from resource_management.libraries.functions import conf_select, stack_select
 from resource_management.libraries.functions.stack_features import check_stack_feature
 from resource_management.libraries.functions.constants import StackFeature
 from resource_management.core.exceptions import ClientComponentHasNoStatus
+from ambari_commons.constants import UPGRADE_TYPE_ROLLING
 from resource_management.core.logger import Logger
-from resource_management.core import shell
 from setup_spark import setup_spark
 
 
@@ -34,11 +37,16 @@ class SparkClient(Script):
     self.install_packages(env)
     self.configure(env)
 
-  def configure(self, env, upgrade_type=None):
+  def configure(self, env, config_dir=None, upgrade_type=None):
+    """
+    :param env: Python environment
+    :param config_dir: During rolling upgrade, which config directory to save configs to.
+    :param upgrade_type:  If in the middle of a stack upgrade, whether rolling or non-rolling
+    """
     import params
     env.set_params(params)
     
-    setup_spark(env, 'client', upgrade_type=upgrade_type, action = 'config')
+    setup_spark(env, 'client', upgrade_type=upgrade_type, action='config', config_dir=config_dir)
 
   def status(self, env):
     raise ClientComponentHasNoStatus()
@@ -46,6 +54,27 @@ class SparkClient(Script):
   def get_component_name(self):
     return "spark-client"
 
+  def stack_upgrade_save_new_config(self, env):
+    """
+    Because this gets called during a Rolling Upgrade, the new configs have already been saved, so we must be
+    careful to only call configure() on the directory with the new version.
+    """
+    import params
+    env.set_params(params)
+
+    conf_select_name = "spark"
+    base_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
+    config_dir = self.get_config_dir_during_stack_upgrade(env, base_dir, conf_select_name)
+
+    if config_dir:
+      Logger.info("stack_upgrade_save_new_config(): Calling conf-select on %s using version %s" % (conf_select_name, str(params.version)))
+
+      # Because this script was called from ru_execute_tasks.py which already enters an Environment with its own basedir,
+      # must change it now so this function can find the Jinja Templates for the service.
+      env.config.basedir = base_dir
+      conf_select.select(params.stack_name, conf_select_name, params.version)
+      self.configure(env, config_dir=config_dir, upgrade_type=UPGRADE_TYPE_ROLLING)
+
   def pre_upgrade_restart(self, env, upgrade_type=None):
     import params
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/58198f53/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params_linux.py
index 44239c7..6587151 100644
--- a/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params_linux.py
@@ -78,9 +78,6 @@ tez_user = config['configurations']['tez-env']['tez_user']
 user_group = config['configurations']['cluster-env']['user_group']
 tez_env_sh_template = config['configurations']['tez-env']['content']
 
-
-
-
 hdfs_site = config['configurations']['hdfs-site']
 default_fs = config['configurations']['core-site']['fs.defaultFS']
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/58198f53/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/tez.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/tez.py b/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/tez.py
index 67466e3..dfa6501 100644
--- a/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/tez.py
+++ b/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/tez.py
@@ -18,7 +18,10 @@ limitations under the License.
 Ambari Agent
 
 """
+# Python Imports
+import os
 
+# Local Imports
 from resource_management.core.resources.system import Directory, File
 from resource_management.libraries.resources.xml_config import XmlConfig
 from resource_management.libraries.functions.format import format
@@ -27,35 +30,44 @@ from ambari_commons import OSConst
 from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl
 
 @OsFamilyFuncImpl(os_family=OsFamilyImpl.DEFAULT)
-def tez():
+def tez(config_dir):
+  """
+  Write out tez-site.xml and tez-env.sh to the config directory.
+  :param config_dir: Which config directory to save configs to, which is different during rolling upgrade.
+  """
   import params
 
   Directory(params.tez_etc_dir, mode=0755)
 
-  Directory(params.config_dir,
+  Directory(config_dir,
             owner = params.tez_user,
             group = params.user_group,
             create_parents = True)
 
   XmlConfig( "tez-site.xml",
-             conf_dir = params.config_dir,
+             conf_dir = config_dir,
              configurations = params.config['configurations']['tez-site'],
              configuration_attributes=params.config['configuration_attributes']['tez-site'],
              owner = params.tez_user,
              group = params.user_group,
              mode = 0664)
 
-  File(format("{config_dir}/tez-env.sh"),
+  tez_env_file_path = os.path.join(config_dir, "tez-env.sh")
+  File(tez_env_file_path,
        owner=params.tez_user,
        content=InlineTemplate(params.tez_env_sh_template),
        mode=0555)
 
 
 @OsFamilyFuncImpl(os_family=OSConst.WINSRV_FAMILY)
-def tez():
+def tez(config_dir):
+  """
+  Write out tez-site.xml and tez-env.sh to the config directory.
+  :param config_dir: Directory to write configs to.
+  """
   import params
   XmlConfig("tez-site.xml",
-             conf_dir=params.tez_conf_dir,
+             conf_dir=config_dir,
              configurations=params.config['configurations']['tez-site'],
              owner=params.tez_user,
              mode="f",

http://git-wip-us.apache.org/repos/asf/ambari/blob/58198f53/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/tez_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/tez_client.py b/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/tez_client.py
index c79d63b..8018f0f 100644
--- a/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/tez_client.py
+++ b/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/tez_client.py
@@ -35,14 +35,22 @@ from resource_management.libraries.functions.stack_features import check_stack_f
 from resource_management.libraries.functions.get_stack_version import get_stack_version
 from resource_management.libraries.script.script import Script
 from resource_management.libraries.functions.default import default
+from resource_management.core.logger import Logger
 
 from tez import tez
 
 class TezClient(Script):
-  def configure(self, env):
+
+  def configure(self, env, config_dir=None, upgrade_type=None):
+    """
+    Write tez-site.xml and tez-env.sh to the config directory
+    :param env: Python Environment
+    :param config_dir: During rolling upgrade, which config directory to save configs to.
+    E.g., /usr/$STACK/current/tez-client/conf
+    """
     import params
     env.set_params(params)
-    tez()
+    tez(config_dir)
 
   def status(self, env):
     raise ClientComponentHasNoStatus()
@@ -53,6 +61,28 @@ class TezClientLinux(TezClient):
   def get_component_name(self):
     return "hadoop-client"
 
+  def stack_upgrade_save_new_config(self, env):
+    """
+    Because this gets called during a Rolling Upgrade, the new tez configs have already been saved, so we must be
+    careful to only call configure() on the directory of the new version.
+    :param env:
+    """
+    import params
+    env.set_params(params)
+
+    conf_select_name = "tez"
+    base_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
+    config_dir = self.get_config_dir_during_stack_upgrade(env, base_dir, conf_select_name)
+
+    if config_dir:
+      Logger.info("stack_upgrade_save_new_config(): Calling conf-select on %s using version %s" % (conf_select_name, str(params.version)))
+
+      # Because this script was called from ru_execute_tasks.py which already enters an Environment with its own basedir,
+      # must change it now so this function can find the Jinja Templates for the service.
+      env.config.basedir = base_dir
+      conf_select.select(params.stack_name, conf_select_name, params.version)
+      self.configure(env, config_dir=config_dir)
+
   def pre_upgrade_restart(self, env, upgrade_type=None):
     import params
     env.set_params(params)
@@ -63,8 +93,9 @@ class TezClientLinux(TezClient):
       stack_select.select("hadoop-client", params.version)
 
   def install(self, env):
+    import params
     self.install_packages(env)
-    self.configure(env)
+    self.configure(env, config_dir=params.config_dir)
 
 @OsFamilyImpl(os_family=OSConst.WINSRV_FAMILY)
 class TezClientWindows(TezClient):
@@ -75,7 +106,7 @@ class TezClientWindows(TezClient):
       params.refresh_tez_state_dependent_params()
     env.set_params(params)
     self._install_lzo_support_if_needed(params)
-    self.configure(env)
+    self.configure(env, config_dir=params.tez_conf_dir)
 
   def _install_lzo_support_if_needed(self, params):
     hadoop_classpath_prefix = self._expand_hadoop_classpath_prefix(params.hadoop_classpath_prefix_template, params.config['configurations']['tez-site'])

http://git-wip-us.apache.org/repos/asf/ambari/blob/58198f53/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/mapreduce2_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/mapreduce2_client.py b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/mapreduce2_client.py
index db22004..424157b 100644
--- a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/mapreduce2_client.py
+++ b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/mapreduce2_client.py
@@ -18,8 +18,11 @@ limitations under the License.
 Ambari Agent
 
 """
-
+# Python imports
+import os
 import sys
+
+# Local imports
 from resource_management.libraries.script.script import Script
 from resource_management.libraries.functions import conf_select, stack_select
 from resource_management.libraries.functions.constants import StackFeature
@@ -28,21 +31,49 @@ from resource_management.core.exceptions import ClientComponentHasNoStatus
 from yarn import yarn
 from ambari_commons import OSConst
 from ambari_commons.os_family_impl import OsFamilyImpl
+from resource_management.core.logger import Logger
 
 
 class MapReduce2Client(Script):
   def install(self, env):
+    import params
     self.install_packages(env)
     self.configure(env)
 
-  def configure(self, env):
+  def configure(self, env, config_dir=None, upgrade_type=None):
+    """
+    :param env: Python environment
+    :param config_dir: During rolling upgrade, which config directory to save configs to.
+    """
     import params
     env.set_params(params)
-    yarn()
+    yarn(config_dir=config_dir)
 
   def status(self, env):
     raise ClientComponentHasNoStatus()
 
+  def stack_upgrade_save_new_config(self, env):
+    """
+    Because this gets called during a Rolling Upgrade, the new mapreduce configs have already been saved, so we must be
+    careful to only call configure() on the directory of the new version.
+    :param env:
+    """
+    import params
+    env.set_params(params)
+
+    conf_select_name = "hadoop"
+    base_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
+    config_dir = self.get_config_dir_during_stack_upgrade(env, base_dir, conf_select_name)
+
+    if config_dir:
+      Logger.info("stack_upgrade_save_new_config(): Calling conf-select on %s using version %s" % (conf_select_name, str(params.version)))
+
+      # Because this script was called from ru_execute_tasks.py which already enters an Environment with its own basedir,
+      # must change it now so this function can find the Jinja Templates for the service.
+      env.config.basedir = base_dir
+      conf_select.select(params.stack_name, conf_select_name, params.version)
+      self.configure(env, config_dir=config_dir)
+
 
 @OsFamilyImpl(os_family=OSConst.WINSRV_FAMILY)
 class MapReduce2ClientWindows(MapReduce2Client):

http://git-wip-us.apache.org/repos/asf/ambari/blob/58198f53/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py
index 90f885a..f6bfae8 100644
--- a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py
@@ -58,6 +58,9 @@ stack_name = status_params.stack_name
 stack_root = Script.get_stack_root()
 tarball_map = default("/configurations/cluster-env/tarball_map", None)
 
+config_path = os.path.join(stack_root, "current/hadoop-client/conf")
+config_dir = os.path.realpath(config_path)
+
 # This is expected to be of the form #.#.#.#
 stack_version_unformatted = config['hostLevelParams']['stack_version']
 stack_version_formatted_major = format_stack_version(stack_version_unformatted)

http://git-wip-us.apache.org/repos/asf/ambari/blob/58198f53/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/yarn.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/yarn.py b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/yarn.py
index d1ec15b..ce312e0 100644
--- a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/yarn.py
+++ b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/yarn.py
@@ -95,8 +95,16 @@ def create_local_dir(dir_name):
   )
 
 @OsFamilyFuncImpl(os_family=OsFamilyImpl.DEFAULT)
-def yarn(name = None):
+def yarn(name=None, config_dir=None):
+  """
+  :param name: Component name, apptimelineserver, nodemanager, resourcemanager, or None (defaults for client)
+  :param config_dir: Which config directory to write configs to, which could be different during rolling upgrade.
+  """
   import params
+
+  if config_dir is None:
+    config_dir = params.hadoop_conf_dir
+
   if name == "historyserver":
     if params.yarn_log_aggregation_enabled:
       params.HdfsResource(params.yarn_nm_app_log_dir,
@@ -228,7 +236,7 @@ def yarn(name = None):
   )
 
   XmlConfig("core-site.xml",
-            conf_dir=params.hadoop_conf_dir,
+            conf_dir=config_dir,
             configurations=params.config['configurations']['core-site'],
             configuration_attributes=params.config['configuration_attributes']['core-site'],
             owner=params.hdfs_user,
@@ -241,7 +249,7 @@ def yarn(name = None):
   # RU should rely on all available in <stack-root>/<version>/hadoop/conf
   if 'hdfs-site' in params.config['configurations']:
     XmlConfig("hdfs-site.xml",
-              conf_dir=params.hadoop_conf_dir,
+              conf_dir=config_dir,
               configurations=params.config['configurations']['hdfs-site'],
               configuration_attributes=params.config['configuration_attributes']['hdfs-site'],
               owner=params.hdfs_user,
@@ -250,7 +258,7 @@ def yarn(name = None):
     )
 
   XmlConfig("mapred-site.xml",
-            conf_dir=params.hadoop_conf_dir,
+            conf_dir=config_dir,
             configurations=params.config['configurations']['mapred-site'],
             configuration_attributes=params.config['configuration_attributes']['mapred-site'],
             owner=params.yarn_user,
@@ -259,7 +267,7 @@ def yarn(name = None):
   )
 
   XmlConfig("yarn-site.xml",
-            conf_dir=params.hadoop_conf_dir,
+            conf_dir=config_dir,
             configurations=params.config['configurations']['yarn-site'],
             configuration_attributes=params.config['configuration_attributes']['yarn-site'],
             owner=params.yarn_user,
@@ -268,7 +276,7 @@ def yarn(name = None):
   )
 
   XmlConfig("capacity-scheduler.xml",
-            conf_dir=params.hadoop_conf_dir,
+            conf_dir=config_dir,
             configurations=params.config['configurations']['capacity-scheduler'],
             configuration_attributes=params.config['configuration_attributes']['capacity-scheduler'],
             owner=params.yarn_user,
@@ -361,7 +369,7 @@ def yarn(name = None):
        content=Template('mapreduce.conf.j2')
   )
 
-  File(format("{hadoop_conf_dir}/yarn-env.sh"),
+  File(os.path.join(config_dir, "yarn-env.sh"),
        owner=params.yarn_user,
        group=params.user_group,
        mode=0755,
@@ -374,7 +382,7 @@ def yarn(name = None):
       mode=params.container_executor_mode
   )
 
-  File(format("{hadoop_conf_dir}/container-executor.cfg"),
+  File(os.path.join(config_dir, "container-executor.cfg"),
       group=params.user_group,
       mode=0644,
       content=Template('container-executor.cfg.j2')
@@ -393,7 +401,7 @@ def yarn(name = None):
     tc_mode = None
     tc_owner = params.hdfs_user
 
-  File(format("{hadoop_conf_dir}/mapred-env.sh"),
+  File(os.path.join(config_dir, "mapred-env.sh"),
        owner=tc_owner,
        mode=0755,
        content=InlineTemplate(params.mapred_env_sh_template)
@@ -405,21 +413,21 @@ def yarn(name = None):
          group=params.mapred_tt_group,
          mode=06050
     )
-    File(os.path.join(params.hadoop_conf_dir, 'taskcontroller.cfg'),
+    File(os.path.join(config_dir, 'taskcontroller.cfg'),
          owner = tc_owner,
          mode = tc_mode,
          group = params.mapred_tt_group,
          content=Template("taskcontroller.cfg.j2")
     )
   else:
-    File(os.path.join(params.hadoop_conf_dir, 'taskcontroller.cfg'),
+    File(os.path.join(config_dir, 'taskcontroller.cfg'),
          owner=tc_owner,
          content=Template("taskcontroller.cfg.j2")
     )
 
   if "mapred-site" in params.config['configurations']:
     XmlConfig("mapred-site.xml",
-              conf_dir=params.hadoop_conf_dir,
+              conf_dir=config_dir,
               configurations=params.config['configurations']['mapred-site'],
               configuration_attributes=params.config['configuration_attributes']['mapred-site'],
               owner=params.mapred_user,
@@ -428,7 +436,7 @@ def yarn(name = None):
 
   if "capacity-scheduler" in params.config['configurations']:
     XmlConfig("capacity-scheduler.xml",
-              conf_dir=params.hadoop_conf_dir,
+              conf_dir=config_dir,
               configurations=params.config['configurations'][
                 'capacity-scheduler'],
               configuration_attributes=params.config['configuration_attributes']['capacity-scheduler'],
@@ -437,7 +445,7 @@ def yarn(name = None):
     )
   if "ssl-client" in params.config['configurations']:
     XmlConfig("ssl-client.xml",
-              conf_dir=params.hadoop_conf_dir,
+              conf_dir=config_dir,
               configurations=params.config['configurations']['ssl-client'],
               configuration_attributes=params.config['configuration_attributes']['ssl-client'],
               owner=params.hdfs_user,
@@ -461,28 +469,28 @@ def yarn(name = None):
 
   if "ssl-server" in params.config['configurations']:
     XmlConfig("ssl-server.xml",
-              conf_dir=params.hadoop_conf_dir,
+              conf_dir=config_dir,
               configurations=params.config['configurations']['ssl-server'],
               configuration_attributes=params.config['configuration_attributes']['ssl-server'],
               owner=params.hdfs_user,
               group=params.user_group
     )
-  if os.path.exists(os.path.join(params.hadoop_conf_dir, 'fair-scheduler.xml')):
-    File(os.path.join(params.hadoop_conf_dir, 'fair-scheduler.xml'),
+  if os.path.exists(os.path.join(config_dir, 'fair-scheduler.xml')):
+    File(os.path.join(config_dir, 'fair-scheduler.xml'),
          owner=params.mapred_user,
          group=params.user_group
     )
 
   if os.path.exists(
-    os.path.join(params.hadoop_conf_dir, 'ssl-client.xml.example')):
-    File(os.path.join(params.hadoop_conf_dir, 'ssl-client.xml.example'),
+    os.path.join(config_dir, 'ssl-client.xml.example')):
+    File(os.path.join(config_dir, 'ssl-client.xml.example'),
          owner=params.mapred_user,
          group=params.user_group
     )
 
   if os.path.exists(
-    os.path.join(params.hadoop_conf_dir, 'ssl-server.xml.example')):
-    File(os.path.join(params.hadoop_conf_dir, 'ssl-server.xml.example'),
+    os.path.join(config_dir, 'ssl-server.xml.example')):
+    File(os.path.join(config_dir, 'ssl-server.xml.example'),
          owner=params.mapred_user,
          group=params.user_group
     )

http://git-wip-us.apache.org/repos/asf/ambari/blob/58198f53/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.4.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.4.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.4.xml
index d424ff9..5284d7a 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.4.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.4.xml
@@ -132,6 +132,44 @@
       </service>
     </group>
 
+    <!-- This group should exist for all RUs that cross a major stack version. -->
+    <group xsi:type="cluster" name="UPDATE_CLIENT_CONFIGS" title="Update Client Configs">
+      <direction>UPGRADE</direction>
+      <execute-stage service="TEZ" component="TEZ_CLIENT" title="Update tez.lib.uris">
+        <task xsi:type="configure" id="hdp_2_4_0_0_tez_client_adjust_tez_lib_uris_property"/>
+      </execute-stage>
+
+      <execute-stage service="TEZ" component="TEZ_CLIENT" title="Verify LZO codec path for Tez">
+        <task xsi:type="server_action" summary="Verifying LZO codec path for Tez" class="org.apache.ambari.server.serveraction.upgrades.FixLzoCodecPath"/>
+      </execute-stage>
+    </group>
+
+    <!-- This needs to be done for every Rolling Upgrade pack that changes configurations. -->
+    <group xsi:type="cluster" name="CONFIGURE_CLIENT_DEPENDENCIES" title="Write client configs">
+      <skippable>true</skippable>
+      <supports-auto-skip-failure>false</supports-auto-skip-failure>
+      <direction>UPGRADE</direction>
+
+      <execute-stage service="MAPREDUCE2" component="MAPREDUCE2_CLIENT" title="Write Mapreduce2 Client configs">
+        <task xsi:type="execute">
+          <script>scripts/mapreduce2_client.py</script>
+          <function>stack_upgrade_save_new_config</function>
+        </task>
+      </execute-stage>
+      <execute-stage service="TEZ" component="TEZ_CLIENT" title="Write Tez Client configs">
+        <task xsi:type="execute">
+          <script>scripts/tez_client.py</script>
+          <function>stack_upgrade_save_new_config</function>
+        </task>
+      </execute-stage>
+      <execute-stage service="SPARK" component="SPARK_CLIENT" title="Write Spark Client configs">
+        <task xsi:type="execute">
+          <script>scripts/spark_client.py</script>
+          <function>stack_upgrade_save_new_config</function>
+        </task>
+      </execute-stage>
+    </group>
+
     <group name="CORE_MASTER" title="Core Masters">
       <service-check>false</service-check>
       <service name="HDFS">
@@ -629,11 +667,6 @@
 
     <service name="TEZ">
       <component name="TEZ_CLIENT">
-        <pre-upgrade>
-          <task xsi:type="configure" id="hdp_2_4_0_0_tez_client_adjust_tez_lib_uris_property"/>
-
-          <task xsi:type="server_action" summary="Verifying LZO codec path for Tez" class="org.apache.ambari.server.serveraction.upgrades.FixLzoCodecPath"/>
-        </pre-upgrade>
         <upgrade>
           <task xsi:type="restart-task" />
         </upgrade>

http://git-wip-us.apache.org/repos/asf/ambari/blob/58198f53/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.5.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.5.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.5.xml
index 1364127..f03e21c 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.5.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.5.xml
@@ -139,6 +139,44 @@
       </service>
     </group>
 
+    <!-- This group should exist for all RUs that cross a major stack version. -->
+    <group xsi:type="cluster" name="UPDATE_CLIENT_CONFIGS" title="Update Client Configs">
+      <direction>UPGRADE</direction>
+      <execute-stage service="TEZ" component="TEZ_CLIENT" title="Update tez.lib.uris">
+        <task xsi:type="configure" id="hdp_2_5_0_0_tez_client_adjust_tez_lib_uris_property"/>
+      </execute-stage>
+
+      <execute-stage service="TEZ" component="TEZ_CLIENT" title="Verify LZO codec path for Tez">
+        <task xsi:type="server_action" summary="Verifying LZO codec path for Tez" class="org.apache.ambari.server.serveraction.upgrades.FixLzoCodecPath"/>
+      </execute-stage>
+    </group>
+
+    <!-- This needs to be done for every Rolling Upgrade pack that changes configurations. -->
+    <group xsi:type="cluster" name="CONFIGURE_CLIENT_DEPENDENCIES" title="Write client configs">
+      <skippable>true</skippable>
+      <supports-auto-skip-failure>false</supports-auto-skip-failure>
+      <direction>UPGRADE</direction>
+
+      <execute-stage service="MAPREDUCE2" component="MAPREDUCE2_CLIENT" title="Write Mapreduce2 Client configs">
+        <task xsi:type="execute">
+          <script>scripts/mapreduce2_client.py</script>
+          <function>stack_upgrade_save_new_config</function>
+        </task>
+      </execute-stage>
+      <execute-stage service="TEZ" component="TEZ_CLIENT" title="Write Tez Client configs">
+        <task xsi:type="execute">
+          <script>scripts/tez_client.py</script>
+          <function>stack_upgrade_save_new_config</function>
+        </task>
+      </execute-stage>
+      <execute-stage service="SPARK" component="SPARK_CLIENT" title="Write Spark Client configs">
+        <task xsi:type="execute">
+          <script>scripts/spark_client.py</script>
+          <function>stack_upgrade_save_new_config</function>
+        </task>
+      </execute-stage>
+    </group>
+
     <group name="CORE_MASTER" title="Core Masters">
       <service-check>false</service-check>
       <service name="HDFS">
@@ -651,11 +689,6 @@
 
     <service name="TEZ">
       <component name="TEZ_CLIENT">
-        <pre-upgrade>
-          <task xsi:type="configure" id="hdp_2_5_0_0_tez_client_adjust_tez_lib_uris_property"/>
-
-          <task xsi:type="server_action" summary="Verifying LZO codec path for Tez" class="org.apache.ambari.server.serveraction.upgrades.FixLzoCodecPath"/>
-        </pre-upgrade>
         <upgrade>
           <task xsi:type="restart-task" />
         </upgrade>

http://git-wip-us.apache.org/repos/asf/ambari/blob/58198f53/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.5.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.5.xml b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.5.xml
index fb59d45..1193c06 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.5.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.5.xml
@@ -134,6 +134,44 @@
       </service>
     </group>
 
+    <!-- This group should exist for all RUs that cross a major stack version. -->
+    <group xsi:type="cluster" name="UPDATE_CLIENT_CONFIGS" title="Update Client Configs">
+      <direction>UPGRADE</direction>
+      <execute-stage service="TEZ" component="TEZ_CLIENT" title="Update tez.lib.uris">
+        <task xsi:type="configure" id="hdp_2_5_0_0_tez_client_adjust_tez_lib_uris_property"/>
+      </execute-stage>
+
+      <execute-stage service="TEZ" component="TEZ_CLIENT" title="Verify LZO codec path for Tez">
+        <task xsi:type="server_action" summary="Verifying LZO codec path for Tez" class="org.apache.ambari.server.serveraction.upgrades.FixLzoCodecPath"/>
+      </execute-stage>
+    </group>
+
+    <!-- This needs to be done for every Rolling Upgrade pack that changes configurations. -->
+    <group xsi:type="cluster" name="CONFIGURE_CLIENT_DEPENDENCIES" title="Write client configs">
+      <skippable>true</skippable>
+      <supports-auto-skip-failure>false</supports-auto-skip-failure>
+      <direction>UPGRADE</direction>
+
+      <execute-stage service="MAPREDUCE2" component="MAPREDUCE2_CLIENT" title="Write Mapreduce2 Client configs">
+        <task xsi:type="execute">
+          <script>scripts/mapreduce2_client.py</script>
+          <function>stack_upgrade_save_new_config</function>
+        </task>
+      </execute-stage>
+      <execute-stage service="TEZ" component="TEZ_CLIENT" title="Write Tez Client configs">
+        <task xsi:type="execute">
+          <script>scripts/tez_client.py</script>
+          <function>stack_upgrade_save_new_config</function>
+        </task>
+      </execute-stage>
+      <execute-stage service="SPARK" component="SPARK_CLIENT" title="Write Spark Client configs">
+        <task xsi:type="execute">
+          <script>scripts/spark_client.py</script>
+          <function>stack_upgrade_save_new_config</function>
+        </task>
+      </execute-stage>
+    </group>
+
     <group name="CORE_MASTER" title="Core Masters">
       <service-check>false</service-check>
       <service name="HDFS">
@@ -642,11 +680,6 @@
 
     <service name="TEZ">
       <component name="TEZ_CLIENT">
-        <pre-upgrade>
-          <task xsi:type="configure" id="hdp_2_5_0_0_tez_client_adjust_tez_lib_uris_property"/>
-
-          <task xsi:type="server_action" summary="Verifying LZO codec path for Tez" class="org.apache.ambari.server.serveraction.upgrades.FixLzoCodecPath"/>
-        </pre-upgrade>
         <upgrade>
           <task xsi:type="restart-task" />
         </upgrade>

http://git-wip-us.apache.org/repos/asf/ambari/blob/58198f53/ambari-server/src/test/python/stacks/2.1/TEZ/test_tez_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.1/TEZ/test_tez_client.py b/ambari-server/src/test/python/stacks/2.1/TEZ/test_tez_client.py
index e53eb4b..73615a4 100644
--- a/ambari-server/src/test/python/stacks/2.1/TEZ/test_tez_client.py
+++ b/ambari-server/src/test/python/stacks/2.1/TEZ/test_tez_client.py
@@ -32,6 +32,7 @@ class TestTezClient(RMFTestCase):
                        classname = "TezClient",
                        command = "configure",
                        config_file="default.json",
+                       command_args=["/etc/tez/conf", ],
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES
     )