Posted to commits@ambari.apache.org by dm...@apache.org on 2015/10/05 18:00:37 UTC

ambari git commit: AMBARI-13299. Stop-and-Start Upgrade: Fix disabled unit tests (dlysnichenko)

Repository: ambari
Updated Branches:
  refs/heads/branch-dev-stop-all-upgrade 094ff7eb7 -> 965369f96


AMBARI-13299. Stop-and-Start Upgrade: Fix disabled unit tests (dlysnichenko)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/965369f9
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/965369f9
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/965369f9

Branch: refs/heads/branch-dev-stop-all-upgrade
Commit: 965369f96b0185552dad6eaffc32ccf3a4e6a95b
Parents: 094ff7e
Author: Lisnichenko Dmitro <dl...@hortonworks.com>
Authored: Mon Oct 5 19:01:58 2015 +0300
Committer: Lisnichenko Dmitro <dl...@hortonworks.com>
Committed: Mon Oct 5 19:01:58 2015 +0300

----------------------------------------------------------------------
 .../state/stack/upgrade/ConfigureTask.java      |   9 +
 .../server/state/stack/upgrade/Grouping.java    |   3 +
 .../UpgradeResourceProviderHDP22Test.java       |   1 +
 .../ambari/server/state/UpgradeHelperTest.java  | 424 ++++++++++---------
 .../state/stack/ConfigUpgradePackTest.java      |  74 +++-
 .../server/state/stack/UpgradePackTest.java     | 178 ++++----
 .../HDP/2.1.1/upgrades/config-upgrade.xml       | 101 +++++
 .../stacks/HDP/2.1.1/upgrades/upgrade_test.xml  |  51 +--
 .../HDP/2.1.1/upgrades/upgrade_test_checks.xml  |   7 +-
 .../HDP/2.2.0/upgrades/config-upgrade.xml       | 101 +++++
 .../HDP/2.2.0/upgrades/upgrade_test_checks.xml  |   9 +-
 11 files changed, 574 insertions(+), 384 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/965369f9/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/ConfigureTask.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/ConfigureTask.java b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/ConfigureTask.java
index a85c416..1164335 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/ConfigureTask.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/ConfigureTask.java
@@ -125,6 +125,15 @@ public class ConfigureTask extends ServerSideActionTask {
   }
 
   /**
+   * This getter is intended to be used only from tests. In production, the
+   * getConfigurationChanges() logic should be used instead.
+   * @return the id of the config upgrade change definition, as defined in the upgrade pack
+   */
+  public String getId() {
+    return id;
+  }
+
+  /**
    * Gets a map containing the following properties pertaining to the
    * configuration value to change:
    * <ul>

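A condensed illustration of why the getId() accessor was added: it lets a test assert the id that ConfigureTask parsed from the upgrade pack without invoking getConfigurationChanges(). This fragment is adapted from the testUpgradeParsing changes further down in this commit and assumes it runs inside the existing UpgradePackTest class, which already provides ambariMetaInfo and the HDP 2.1.1 test stack fixtures:

    // load the test upgrade pack and pull the YARN/NODEMANAGER pre-upgrade tasks
    Map<String, UpgradePack> upgrades = ambariMetaInfo.getUpgradePacks("HDP", "2.1.1");
    UpgradePack upgrade = upgrades.get("upgrade_test");
    ProcessingComponent pc = upgrade.getTasks().get("YARN").get("NODEMANAGER");

    // the second pre-upgrade task is the configure task; its id should match the
    // <definition id="hdp_2_1_1_nm_pre_upgrade"> entry in config-upgrade.xml
    ConfigureTask ct = (ConfigureTask) pc.preTasks.get(1);
    assertEquals("hdp_2_1_1_nm_pre_upgrade", ct.getId());
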
http://git-wip-us.apache.org/repos/asf/ambari/blob/965369f9/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/Grouping.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/Grouping.java b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/Grouping.java
index 36a0194..4116101 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/Grouping.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/Grouping.java
@@ -199,8 +199,11 @@ public class Grouping {
   }
 
   private static class TaskBucket {
+
     private StageWrapper.Type type;
+
     private List<Task> tasks = new ArrayList<Task>();
+
     private TaskBucket(Task initial) {
       switch (initial.getType()) {
         case CONFIGURE:

http://git-wip-us.apache.org/repos/asf/ambari/blob/965369f9/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UpgradeResourceProviderHDP22Test.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UpgradeResourceProviderHDP22Test.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UpgradeResourceProviderHDP22Test.java
index b86380a..bd0726c 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UpgradeResourceProviderHDP22Test.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UpgradeResourceProviderHDP22Test.java
@@ -269,6 +269,7 @@ public class UpgradeResourceProviderHDP22Test {
     assertEquals(1, upgrades.size());
 
     UpgradeEntity upgrade = upgrades.get(0);
+    assertEquals("upgrade_test", upgrade.getUpgradePackage());
     assertEquals(3, upgrade.getUpgradeGroups().size());
 
     UpgradeGroupEntity group = upgrade.getUpgradeGroups().get(2);

http://git-wip-us.apache.org/repos/asf/ambari/blob/965369f9/ambari-server/src/test/java/org/apache/ambari/server/state/UpgradeHelperTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/UpgradeHelperTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/UpgradeHelperTest.java
index abf3939..1ef0c84 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/UpgradeHelperTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/UpgradeHelperTest.java
@@ -20,6 +20,7 @@ package org.apache.ambari.server.state;
 import static org.easymock.EasyMock.expect;
 import static org.easymock.EasyMock.replay;
 import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertTrue;
 
@@ -35,6 +36,7 @@ import java.util.Set;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
+import com.google.gson.reflect.TypeToken;
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.api.services.AmbariMetaInfo;
 import org.apache.ambari.server.controller.AmbariManagementController;
@@ -49,13 +51,9 @@ import org.apache.ambari.server.orm.entities.StackEntity;
 import org.apache.ambari.server.stack.HostsType;
 import org.apache.ambari.server.stack.MasterHostResolver;
 import org.apache.ambari.server.state.UpgradeHelper.UpgradeGroupHolder;
+import org.apache.ambari.server.state.stack.ConfigUpgradePack;
 import org.apache.ambari.server.state.stack.UpgradePack;
-import org.apache.ambari.server.state.stack.upgrade.Direction;
-import org.apache.ambari.server.state.stack.upgrade.ManualTask;
-import org.apache.ambari.server.state.stack.upgrade.StageWrapper;
-import org.apache.ambari.server.state.stack.upgrade.Task;
-import org.apache.ambari.server.state.stack.upgrade.TaskWrapper;
-import org.apache.ambari.server.state.stack.upgrade.UpgradeType;
+import org.apache.ambari.server.state.stack.upgrade.*;
 import org.easymock.EasyMock;
 import org.junit.After;
 import org.junit.Before;
@@ -63,17 +61,18 @@ import org.junit.Ignore;
 import org.junit.Test;
 
 import com.google.gson.Gson;
+import com.google.gson.reflect.TypeToken;
 import com.google.inject.Binder;
 import com.google.inject.Guice;
 import com.google.inject.Injector;
 import com.google.inject.Module;
 
 import com.google.inject.persist.PersistService;
+import com.google.inject.util.Modules;
 
 /**
  * Tests the {@link UpgradeHelper} class
  */
-@Ignore   // TODO: fix unit tests
 public class UpgradeHelperTest {
 
   private static final StackId HDP_21 = new StackId("HPD-2.1.1");
@@ -100,6 +99,13 @@ public class UpgradeHelperTest {
         EasyMock.anyObject(Cluster.class), EasyMock.eq("{{foo/bar}}"))).andReturn(
         "placeholder-rendered-properly").anyTimes();
 
+    expect(
+        m_configHelper.getEffectiveDesiredTags(
+            EasyMock.anyObject(Cluster.class), EasyMock.anyObject(String.class))).
+        andReturn(new HashMap<String, Map<String, String>>()).anyTimes();
+
+    replay(m_configHelper);
+
     final InMemoryDefaultTestModule injectorModule = new InMemoryDefaultTestModule() {
       @Override
       protected void configure() {
@@ -107,8 +113,9 @@ public class UpgradeHelperTest {
       }
     };
 
+    MockModule mockModule = new MockModule();
     // create an injector which will inject the mocks
-    injector = Guice.createInjector(injectorModule);
+    injector = Guice.createInjector(Modules.override(injectorModule).with(mockModule));
     injector.getInstance(GuiceJpaInitializer.class);
 
     helper = injector.getInstance(OrmTestHelper.class);
@@ -412,203 +419,203 @@ public class UpgradeHelperTest {
         manualTask.message);
   }
 
-// TODO: fixme
-//  @Test
-//  public void testConditionalDeleteTask() throws Exception {
-//    Map<String, UpgradePack> upgrades = ambariMetaInfo.getUpgradePacks("HDP", "2.1.1");
-//    assertTrue(upgrades.containsKey("upgrade_test"));
-//    UpgradePack upgrade = upgrades.get("upgrade_test");
-//    assertNotNull(upgrade);
-//
-//    Cluster cluster = makeCluster();
-//
-//    UpgradeContext context = new UpgradeContext(m_masterHostResolver, HDP_21,
-//                                                HDP_21, UPGRADE_VERSION, Direction.UPGRADE, UpgradeType.ROLLING);
-//
-//    List<UpgradeGroupHolder> groups = m_upgradeHelper.createSequence(upgrade, context);
-//
-//    assertEquals(6, groups.size());
-//
-//    // grab the configure task out of Hive
-//    UpgradeGroupHolder hiveGroup = groups.get(4);
-//    assertEquals("HIVE", hiveGroup.name);
-//    ConfigureTask configureTask = (ConfigureTask) hiveGroup.items.get(1).getTasks().get(
-//        1).getTasks().get(0);
-//
-//    // now change the thrift port to http to have the 2nd condition invoked
-//    Map<String, String> hiveConfigs = new HashMap<String, String>();
-//    hiveConfigs.put("hive.server2.transport.mode", "http");
-//    hiveConfigs.put("hive.server2.thrift.port", "10001");
-//    ConfigurationRequest configurationRequest = new ConfigurationRequest();
-//    configurationRequest.setClusterName(cluster.getClusterName());
-//    configurationRequest.setType("hive-site");
-//    configurationRequest.setVersionTag("version2");
-//    configurationRequest.setProperties(hiveConfigs);
-//
-//    final ClusterRequest clusterRequest = new ClusterRequest(
-//        cluster.getClusterId(), cluster.getClusterName(),
-//        cluster.getDesiredStackVersion().getStackVersion(), null);
-//
-//    clusterRequest.setDesiredConfig(Collections.singletonList(configurationRequest));
-//    m_managementController.updateClusters(new HashSet<ClusterRequest>() {
-//      {
-//        add(clusterRequest);
-//      }
-//    }, null);
-//
-//    Map<String, String> configProperties = configureTask.getConfigurationChanges(cluster);
-//    assertFalse(configProperties.isEmpty());
-//    assertEquals(configProperties.get(ConfigureTask.PARAMETER_CONFIG_TYPE), "hive-site");
-//
-//    String configurationJson = configProperties.get(ConfigureTask.PARAMETER_TRANSFERS);
-//    assertNotNull(configurationJson);
-//
-//    List<Transfer> transfers = m_gson.fromJson(configurationJson,
-//            new TypeToken<List<Transfer>>() { }.getType());
-//
-//    assertEquals(8, transfers.size());
-//    assertEquals("copy-key", transfers.get(0).fromKey);
-//    assertEquals("copy-key-to", transfers.get(0).toKey);
-//
-//    assertEquals("move-key", transfers.get(1).fromKey);
-//    assertEquals("move-key-to", transfers.get(1).toKey);
-//
-//    assertEquals("delete-key", transfers.get(2).deleteKey);
-//
-//    assertEquals("delete-http", transfers.get(3).deleteKey);
-//    assertEquals("delete-null-if-value", transfers.get(4).deleteKey);
-//    assertEquals("delete-blank-if-key", transfers.get(5).deleteKey);
-//    assertEquals("delete-blank-if-type", transfers.get(6).deleteKey);
-//    assertEquals("delete-thrift", transfers.get(7).deleteKey);
-//  }
-
-
-// TODO: fixme
-//  @Test
-//  public void testConfigureTask() throws Exception {
-//    Map<String, UpgradePack> upgrades = ambariMetaInfo.getUpgradePacks("HDP", "2.1.1");
-//    assertTrue(upgrades.containsKey("upgrade_test"));
-//    UpgradePack upgrade = upgrades.get("upgrade_test");
-//    assertNotNull(upgrade);
-//
-//    Cluster cluster = makeCluster();
-//
-//    UpgradeContext context = new UpgradeContext(m_masterHostResolver, HDP_21,
-//        HDP_21, UPGRADE_VERSION, Direction.UPGRADE, UpgradeType.ROLLING);
-//
-//    List<UpgradeGroupHolder> groups = m_upgradeHelper.createSequence(upgrade,
-//        context);
-//
-//    assertEquals(6, groups.size());
-//
-//    // grab the configure task out of Hive
-//    UpgradeGroupHolder hiveGroup = groups.get(4);
-//    assertEquals("HIVE", hiveGroup.name);
-//    ConfigureTask configureTask = (ConfigureTask) hiveGroup.items.get(1).getTasks().get(
-//        0).getTasks().get(0);
-//
-//    Map<String, String> configProperties = configureTask.getConfigurationChanges(cluster);
-//    assertFalse(configProperties.isEmpty());
-//    assertEquals(configProperties.get(ConfigureTask.PARAMETER_CONFIG_TYPE), "hive-site");
-//
-//    String configurationJson = configProperties.get(ConfigureTask.PARAMETER_KEY_VALUE_PAIRS);
-//    assertNotNull(configurationJson);
-//
-//    List<ConfigurationKeyValue> keyValuePairs = m_gson.fromJson(configurationJson,
-//        new TypeToken<List<ConfigurationKeyValue>>() {
-//        }.getType());
-//
-//    assertEquals("hive.server2.thrift.port", keyValuePairs.get(0).key);
-//    assertEquals("10010", keyValuePairs.get(0).value);
-//
-//    // now change the thrift port to http to have the 2nd condition invoked
-//    Map<String, String> hiveConfigs = new HashMap<String, String>();
-//    hiveConfigs.put("hive.server2.transport.mode", "http");
-//    hiveConfigs.put("hive.server2.thrift.port", "10001");
-//    ConfigurationRequest configurationRequest = new ConfigurationRequest();
-//    configurationRequest.setClusterName(cluster.getClusterName());
-//    configurationRequest.setType("hive-site");
-//    configurationRequest.setVersionTag("version2");
-//    configurationRequest.setProperties(hiveConfigs);
-//
-//    final ClusterRequest clusterRequest = new ClusterRequest(
-//        cluster.getClusterId(), cluster.getClusterName(),
-//        cluster.getDesiredStackVersion().getStackVersion(), null);
-//
-//    clusterRequest.setDesiredConfig(Collections.singletonList(configurationRequest));
-//    m_managementController.updateClusters(new HashSet<ClusterRequest>() {
-//      {
-//        add(clusterRequest);
-//      }
-//    }, null);
-//
-//    // the configure task should now return different properties
-//    configProperties = configureTask.getConfigurationChanges(cluster);
-//    assertFalse(configProperties.isEmpty());
-//    assertEquals( configProperties.get(ConfigureTask.PARAMETER_CONFIG_TYPE), "hive-site");
-//
-//    configurationJson = configProperties.get(ConfigureTask.PARAMETER_KEY_VALUE_PAIRS);
-//    assertNotNull(configurationJson);
-//
-//    keyValuePairs = m_gson.fromJson(configurationJson,
-//        new TypeToken<List<ConfigurationKeyValue>>() {
-//        }.getType());
-//
-//    assertEquals("hive.server2.http.port", keyValuePairs.get(0).key);
-//    assertEquals("10011", keyValuePairs.get(0).value);
-//  }
-
-// TODO: fixme
-//  @Test
-//  public void testConfigureTaskWithMultipleConfigurations() throws Exception {
-//    Map<String, UpgradePack> upgrades = ambariMetaInfo.getUpgradePacks("HDP", "2.1.1");
-//    assertTrue(upgrades.containsKey("upgrade_test"));
-//    UpgradePack upgrade = upgrades.get("upgrade_test");
-//    assertNotNull(upgrade);
-//    Cluster cluster = makeCluster();
-//
-//    UpgradeContext context = new UpgradeContext(m_masterHostResolver, HDP_21, HDP_21,
-//        UPGRADE_VERSION, Direction.UPGRADE, UpgradeType.ROLLING);
-//
-//    List<UpgradeGroupHolder> groups = m_upgradeHelper.createSequence(upgrade, context);
-//
-//    assertEquals(6, groups.size());
-//
-//    // grab the configure task out of Hive
-//    UpgradeGroupHolder hiveGroup = groups.get(4);
-//    assertEquals("HIVE", hiveGroup.name);
-//    ConfigureTask configureTask = (ConfigureTask) hiveGroup.items.get(1).getTasks().get(1).getTasks().get(0);
-//
-//    Map<String, String> configProperties = configureTask.getConfigurationChanges(cluster);
-//    assertFalse(configProperties.isEmpty());
-//    assertEquals(configProperties.get(ConfigureTask.PARAMETER_CONFIG_TYPE), "hive-site");
-//
-//    String configurationJson = configProperties.get(ConfigureTask.PARAMETER_KEY_VALUE_PAIRS);
-//    String transferJson = configProperties.get(ConfigureTask.PARAMETER_TRANSFERS);
-//    assertNotNull(configurationJson);
-//    assertNotNull(transferJson);
-//
-//    List<ConfigurationKeyValue> keyValuePairs = m_gson.fromJson(configurationJson,
-//        new TypeToken<List<ConfigurationKeyValue>>() {
-//        }.getType());
-//
-//    List<Transfer> transfers = m_gson.fromJson(transferJson,
-//        new TypeToken<List<Transfer>>() {
-//        }.getType());
-//
-//    assertEquals("fooKey", keyValuePairs.get(0).key);
-//    assertEquals("fooValue", keyValuePairs.get(0).value);
-//    assertEquals("fooKey2", keyValuePairs.get(1).key);
-//    assertEquals("fooValue2", keyValuePairs.get(1).value);
-//    assertEquals("fooKey3", keyValuePairs.get(2).key);
-//    assertEquals("fooValue3", keyValuePairs.get(2).value);
-//
-//    assertEquals("copy-key", transfers.get(0).fromKey);
-//    assertEquals("copy-key-to", transfers.get(0).toKey);
-//
-//    assertEquals("move-key", transfers.get(1).fromKey);
-//    assertEquals("move-key-to", transfers.get(1).toKey);
-//  }
+  @Test
+  public void testConditionalDeleteTask() throws Exception {
+    Map<String, UpgradePack> upgrades = ambariMetaInfo.getUpgradePacks("HDP", "2.1.1");
+    assertTrue(upgrades.containsKey("upgrade_test"));
+    UpgradePack upgrade = upgrades.get("upgrade_test");
+    ConfigUpgradePack cup = ambariMetaInfo.getConfigUpgradePack("HDP", "2.1.1");
+    assertNotNull(upgrade);
+
+    Cluster cluster = makeCluster();
+
+    UpgradeContext context = new UpgradeContext(m_masterHostResolver, HDP_21,
+                                                HDP_21, UPGRADE_VERSION, Direction.UPGRADE, UpgradeType.ROLLING);
+
+    List<UpgradeGroupHolder> groups = m_upgradeHelper.createSequence(upgrade, context);
+
+    assertEquals(6, groups.size());
+
+    // grab the configure task out of Hive
+    UpgradeGroupHolder hiveGroup = groups.get(4);
+    assertEquals("HIVE", hiveGroup.name);
+    ConfigureTask configureTask = (ConfigureTask) hiveGroup.items.get(1).getTasks().get(
+        1).getTasks().get(0);
+
+    // now change the thrift port to http to have the 2nd condition invoked
+    Map<String, String> hiveConfigs = new HashMap<String, String>();
+    hiveConfigs.put("hive.server2.transport.mode", "http");
+    hiveConfigs.put("hive.server2.thrift.port", "10001");
+    ConfigurationRequest configurationRequest = new ConfigurationRequest();
+    configurationRequest.setClusterName(cluster.getClusterName());
+    configurationRequest.setType("hive-site");
+    configurationRequest.setVersionTag("version2");
+    configurationRequest.setProperties(hiveConfigs);
+
+    final ClusterRequest clusterRequest = new ClusterRequest(
+        cluster.getClusterId(), cluster.getClusterName(),
+        cluster.getDesiredStackVersion().getStackVersion(), null);
+
+    clusterRequest.setDesiredConfig(Collections.singletonList(configurationRequest));
+    m_managementController.updateClusters(new HashSet<ClusterRequest>() {
+      {
+        add(clusterRequest);
+      }
+    }, null);
+
+    Map<String, String> configProperties = configureTask.getConfigurationChanges(cluster, cup);
+    assertFalse(configProperties.isEmpty());
+    assertEquals(configProperties.get(ConfigureTask.PARAMETER_CONFIG_TYPE), "hive-site");
+
+    String configurationJson = configProperties.get(ConfigureTask.PARAMETER_TRANSFERS);
+    assertNotNull(configurationJson);
+
+    List<ConfigUpgradeChangeDefinition.Transfer> transfers = m_gson.fromJson(configurationJson,
+            new TypeToken<List<ConfigUpgradeChangeDefinition.Transfer>>() { }.getType());
+
+    assertEquals(8, transfers.size());
+    assertEquals("copy-key", transfers.get(0).fromKey);
+    assertEquals("copy-key-to", transfers.get(0).toKey);
+
+    assertEquals("move-key", transfers.get(1).fromKey);
+    assertEquals("move-key-to", transfers.get(1).toKey);
+
+    assertEquals("delete-key", transfers.get(2).deleteKey);
+
+    assertEquals("delete-http", transfers.get(3).deleteKey);
+    assertEquals("delete-null-if-value", transfers.get(4).deleteKey);
+    assertEquals("delete-blank-if-key", transfers.get(5).deleteKey);
+    assertEquals("delete-blank-if-type", transfers.get(6).deleteKey);
+    assertEquals("delete-thrift", transfers.get(7).deleteKey);
+  }
+
+
+  @Test
+  public void testConfigureTask() throws Exception {
+    Map<String, UpgradePack> upgrades = ambariMetaInfo.getUpgradePacks("HDP", "2.1.1");
+    assertTrue(upgrades.containsKey("upgrade_test"));
+    UpgradePack upgrade = upgrades.get("upgrade_test");
+    ConfigUpgradePack cup = ambariMetaInfo.getConfigUpgradePack("HDP", "2.1.1");
+    assertNotNull(upgrade);
+
+    Cluster cluster = makeCluster();
+
+    UpgradeContext context = new UpgradeContext(m_masterHostResolver, HDP_21,
+        HDP_21, UPGRADE_VERSION, Direction.UPGRADE, UpgradeType.ROLLING);
+
+    List<UpgradeGroupHolder> groups = m_upgradeHelper.createSequence(upgrade,
+        context);
+
+    assertEquals(6, groups.size());
+
+    // grab the configure task out of Hive
+    UpgradeGroupHolder hiveGroup = groups.get(4);
+    assertEquals("HIVE", hiveGroup.name);
+    ConfigureTask configureTask = (ConfigureTask) hiveGroup.items.get(1).getTasks().get(
+        0).getTasks().get(0);
+
+    Map<String, String> configProperties = configureTask.getConfigurationChanges(cluster, cup);
+    assertFalse(configProperties.isEmpty());
+    assertEquals(configProperties.get(ConfigureTask.PARAMETER_CONFIG_TYPE), "hive-site");
+
+    String configurationJson = configProperties.get(ConfigureTask.PARAMETER_KEY_VALUE_PAIRS);
+    assertNotNull(configurationJson);
+
+    List<ConfigUpgradeChangeDefinition.ConfigurationKeyValue> keyValuePairs = m_gson.fromJson(configurationJson,
+        new TypeToken<List<ConfigUpgradeChangeDefinition.ConfigurationKeyValue>>() {
+        }.getType());
+
+    assertEquals("hive.server2.thrift.port", keyValuePairs.get(0).key);
+    assertEquals("10010", keyValuePairs.get(0).value);
+
+    // now change the thrift port to http to have the 2nd condition invoked
+    Map<String, String> hiveConfigs = new HashMap<String, String>();
+    hiveConfigs.put("hive.server2.transport.mode", "http");
+    hiveConfigs.put("hive.server2.thrift.port", "10001");
+    ConfigurationRequest configurationRequest = new ConfigurationRequest();
+    configurationRequest.setClusterName(cluster.getClusterName());
+    configurationRequest.setType("hive-site");
+    configurationRequest.setVersionTag("version2");
+    configurationRequest.setProperties(hiveConfigs);
+
+    final ClusterRequest clusterRequest = new ClusterRequest(
+        cluster.getClusterId(), cluster.getClusterName(),
+        cluster.getDesiredStackVersion().getStackVersion(), null);
+
+    clusterRequest.setDesiredConfig(Collections.singletonList(configurationRequest));
+    m_managementController.updateClusters(new HashSet<ClusterRequest>() {
+      {
+        add(clusterRequest);
+      }
+    }, null);
+
+    // the configure task should now return different properties
+    configProperties = configureTask.getConfigurationChanges(cluster, cup);
+    assertFalse(configProperties.isEmpty());
+    assertEquals( configProperties.get(ConfigureTask.PARAMETER_CONFIG_TYPE), "hive-site");
+
+    configurationJson = configProperties.get(ConfigureTask.PARAMETER_KEY_VALUE_PAIRS);
+    assertNotNull(configurationJson);
+
+    keyValuePairs = m_gson.fromJson(configurationJson,
+        new TypeToken<List<ConfigUpgradeChangeDefinition.ConfigurationKeyValue>>() {
+        }.getType());
+
+    assertEquals("hive.server2.http.port", keyValuePairs.get(0).key);
+    assertEquals("10011", keyValuePairs.get(0).value);
+  }
+
+  @Test
+  public void testConfigureTaskWithMultipleConfigurations() throws Exception {
+    Map<String, UpgradePack> upgrades = ambariMetaInfo.getUpgradePacks("HDP", "2.1.1");
+    assertTrue(upgrades.containsKey("upgrade_test"));
+    UpgradePack upgrade = upgrades.get("upgrade_test");
+    ConfigUpgradePack cup = ambariMetaInfo.getConfigUpgradePack("HDP", "2.1.1");
+    assertNotNull(upgrade);
+    Cluster cluster = makeCluster();
+
+    UpgradeContext context = new UpgradeContext(m_masterHostResolver, HDP_21, HDP_21,
+        UPGRADE_VERSION, Direction.UPGRADE, UpgradeType.ROLLING);
+
+    List<UpgradeGroupHolder> groups = m_upgradeHelper.createSequence(upgrade, context);
+
+    assertEquals(6, groups.size());
+
+    // grab the configure task out of Hive
+    UpgradeGroupHolder hiveGroup = groups.get(4);
+    assertEquals("HIVE", hiveGroup.name);
+    ConfigureTask configureTask = (ConfigureTask) hiveGroup.items.get(1).getTasks().get(1).getTasks().get(0);
+
+    Map<String, String> configProperties = configureTask.getConfigurationChanges(cluster, cup);
+    assertFalse(configProperties.isEmpty());
+    assertEquals(configProperties.get(ConfigureTask.PARAMETER_CONFIG_TYPE), "hive-site");
+
+    String configurationJson = configProperties.get(ConfigureTask.PARAMETER_KEY_VALUE_PAIRS);
+    String transferJson = configProperties.get(ConfigureTask.PARAMETER_TRANSFERS);
+    assertNotNull(configurationJson);
+    assertNotNull(transferJson);
+
+    List<ConfigUpgradeChangeDefinition.ConfigurationKeyValue> keyValuePairs = m_gson.fromJson(configurationJson,
+        new TypeToken<List<ConfigUpgradeChangeDefinition.ConfigurationKeyValue>>() {
+        }.getType());
+
+    List<ConfigUpgradeChangeDefinition.Transfer> transfers = m_gson.fromJson(transferJson,
+        new TypeToken<List<ConfigUpgradeChangeDefinition.Transfer>>() {
+        }.getType());
+
+    assertEquals("fooKey", keyValuePairs.get(0).key);
+    assertEquals("fooValue", keyValuePairs.get(0).value);
+    assertEquals("fooKey2", keyValuePairs.get(1).key);
+    assertEquals("fooValue2", keyValuePairs.get(1).value);
+    assertEquals("fooKey3", keyValuePairs.get(2).key);
+    assertEquals("fooValue3", keyValuePairs.get(2).value);
+
+    assertEquals("copy-key", transfers.get(0).fromKey);
+    assertEquals("copy-key-to", transfers.get(0).toKey);
+
+    assertEquals("move-key", transfers.get(1).fromKey);
+    assertEquals("move-key-to", transfers.get(1).toKey);
+  }
 
   @Test
   public void testServiceCheckUpgradeStages() throws Exception {
@@ -1002,13 +1009,8 @@ public class UpgradeHelperTest {
 
 
 
-  /**
-   *
-   */
   private class MockModule implements Module {
-    /**
-    *
-    */
+
     @Override
     public void configure(Binder binder) {
       binder.bind(ConfigHelper.class).toInstance(m_configHelper);

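The setUp() change above switches the test injector to Guice's Modules.override() so that MockModule can re-bind ConfigHelper to the EasyMock instance while keeping the rest of the in-memory test bindings. A minimal, standalone sketch of that pattern follows; the class MockOverrideSketch is illustrative only and not part of this commit, though the binding names mirror the test:

    import com.google.inject.AbstractModule;
    import com.google.inject.Guice;
    import com.google.inject.Injector;
    import com.google.inject.Module;
    import com.google.inject.util.Modules;
    import org.apache.ambari.server.orm.InMemoryDefaultTestModule;
    import org.apache.ambari.server.state.ConfigHelper;
    import org.easymock.EasyMock;

    public class MockOverrideSketch {
      public static void main(String[] args) {
        // mocked ConfigHelper, the same collaborator the test stubs with expect()/replay()
        final ConfigHelper configHelper = EasyMock.createNiceMock(ConfigHelper.class);
        // switch the mock to replay mode (no expectations: a nice mock returns defaults)
        EasyMock.replay(configHelper);

        // base bindings: the regular in-memory test module
        Module base = new InMemoryDefaultTestModule();

        // override only the ConfigHelper binding with the mock
        Module mocks = new AbstractModule() {
          @Override
          protected void configure() {
            bind(ConfigHelper.class).toInstance(configHelper);
          }
        };

        // everything the injector creates now receives the mocked ConfigHelper
        Injector injector = Guice.createInjector(Modules.override(base).with(mocks));
      }
    }
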
http://git-wip-us.apache.org/repos/asf/ambari/blob/965369f9/ambari-server/src/test/java/org/apache/ambari/server/state/stack/ConfigUpgradePackTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/stack/ConfigUpgradePackTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/stack/ConfigUpgradePackTest.java
index da5c0ab..388a81f 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/stack/ConfigUpgradePackTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/stack/ConfigUpgradePackTest.java
@@ -23,14 +23,8 @@ import com.google.inject.persist.PersistService;
 import org.apache.ambari.server.api.services.AmbariMetaInfo;
 import org.apache.ambari.server.orm.GuiceJpaInitializer;
 import org.apache.ambari.server.orm.InMemoryDefaultTestModule;
-import org.apache.ambari.server.state.stack.upgrade.ClusterGrouping;
+import org.apache.ambari.server.state.stack.upgrade.*;
 import org.apache.ambari.server.state.stack.upgrade.ClusterGrouping.ExecuteStage;
-import org.apache.ambari.server.state.stack.upgrade.ConfigUpgradeChangeDefinition;
-import org.apache.ambari.server.state.stack.upgrade.Direction;
-import org.apache.ambari.server.state.stack.upgrade.Grouping;
-import org.apache.ambari.server.state.stack.upgrade.RestartGrouping;
-import org.apache.ambari.server.state.stack.upgrade.StopGrouping;
-import org.apache.ambari.server.state.stack.upgrade.UpgradeType;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
@@ -42,21 +36,33 @@ import java.util.List;
 import java.util.Map;
 import java.util.Map.Entry;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertTrue;
-
 import static org.apache.ambari.server.state.stack.ConfigUpgradePack.AffectedService;
 import static org.apache.ambari.server.state.stack.ConfigUpgradePack.AffectedComponent;
+import static org.junit.Assert.*;
 
 /**
  * Tests for the config upgrade pack
  */
 public class ConfigUpgradePackTest {
 
+  private Injector injector;
+  private AmbariMetaInfo ambariMetaInfo;
+
+  @Before
+  public void before() throws Exception {
+    injector = Guice.createInjector(new InMemoryDefaultTestModule());
+    injector.getInstance(GuiceJpaInitializer.class);
+
+    ambariMetaInfo = injector.getInstance(AmbariMetaInfo.class);
+  }
+
+  @After
+  public void teardown() {
+    injector.getInstance(PersistService.class).stop();
+  }
+
   @Test
   public void testMerge() {
-
     // Generate test data - 3 config upgrade packs, 2 services, 2 components, 2 config changes each
     ArrayList<ConfigUpgradePack> cups = new ArrayList<>();
     for (int cupIndex = 0; cupIndex < 3; cupIndex++) {
@@ -145,4 +151,48 @@ public class ConfigUpgradePackTest {
 
   }
 
+  @Test
+  public void testConfigUpgradeDefinitionParsing() throws Exception {
+    ConfigUpgradePack cup = ambariMetaInfo.getConfigUpgradePack("HDP", "2.1.1");
+    Map<String, ConfigUpgradeChangeDefinition> changesByID = cup.enumerateConfigChangesByID();
+
+    ConfigUpgradeChangeDefinition hdp_2_1_1_nm_pre_upgrade = changesByID.get("hdp_2_1_1_nm_pre_upgrade");
+    assertEquals("core-site", hdp_2_1_1_nm_pre_upgrade.getConfigType());
+    assertEquals(4, hdp_2_1_1_nm_pre_upgrade.getTransfers().size());
+
+    /*
+            <transfer operation="COPY" from-key="copy-key" to-key="copy-key-to" />
+            <transfer operation="COPY" from-type="my-site" from-key="my-copy-key" to-key="my-copy-key-to" />
+            <transfer operation="MOVE" from-key="move-key" to-key="move-key-to" />
+            <transfer operation="DELETE" delete-key="delete-key">
+              <keep-key>important-key</keep-key>
+            </transfer>
+    */
+    ConfigUpgradeChangeDefinition.Transfer t1 = hdp_2_1_1_nm_pre_upgrade.getTransfers().get(0);
+    assertEquals(TransferOperation.COPY, t1.operation);
+    assertEquals("copy-key", t1.fromKey);
+    assertEquals("copy-key-to", t1.toKey);
+
+    ConfigUpgradeChangeDefinition.Transfer t2 = hdp_2_1_1_nm_pre_upgrade.getTransfers().get(1);
+    assertEquals(TransferOperation.COPY, t2.operation);
+    assertEquals("my-site", t2.fromType);
+    assertEquals("my-copy-key", t2.fromKey);
+    assertEquals("my-copy-key-to", t2.toKey);
+    assertTrue(t2.keepKeys.isEmpty());
+
+    ConfigUpgradeChangeDefinition.Transfer t3 = hdp_2_1_1_nm_pre_upgrade.getTransfers().get(2);
+    assertEquals(TransferOperation.MOVE, t3.operation);
+    assertEquals("move-key", t3.fromKey);
+    assertEquals("move-key-to", t3.toKey);
+
+    ConfigUpgradeChangeDefinition.Transfer t4 = hdp_2_1_1_nm_pre_upgrade.getTransfers().get(3);
+    assertEquals(TransferOperation.DELETE, t4.operation);
+    assertEquals("delete-key", t4.deleteKey);
+    assertNull(t4.toKey);
+    assertTrue(t4.preserveEdits);
+    assertEquals(1, t4.keepKeys.size());
+    assertEquals("important-key", t4.keepKeys.get(0));
+
+  }
+
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/965369f9/ambari-server/src/test/java/org/apache/ambari/server/state/stack/UpgradePackTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/stack/UpgradePackTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/stack/UpgradePackTest.java
index cfeb08c..ade5d14 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/stack/UpgradePackTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/stack/UpgradePackTest.java
@@ -85,112 +85,78 @@ public class UpgradePackTest {
   }
 
 
-// TODO: fixme
-//  @Test
-//  public void testUpgradeParsing() throws Exception {
-//    Map<String, UpgradePack> upgrades = ambariMetaInfo.getUpgradePacks("HDP", "2.1.1");
-//    assertTrue(upgrades.size() > 0);
-//    assertTrue(upgrades.containsKey("upgrade_test"));
-//    UpgradePack upgrade = upgrades.get("upgrade_test");
-//    assertEquals("2.2.*.*", upgrade.getTarget());
-//
-//    Map<String, List<String>> expectedStages = new LinkedHashMap<String, List<String>>() {{
-//      put("ZOOKEEPER", Arrays.asList("ZOOKEEPER_SERVER"));
-//      put("HDFS", Arrays.asList("NAMENODE", "DATANODE"));
-//    }};
-//
-//    // !!! test the tasks
-//    int i = 0;
-//    for (Entry<String, List<String>> entry : expectedStages.entrySet()) {
-//      assertTrue(upgrade.getTasks().containsKey(entry.getKey()));
-//      assertEquals(i++, indexOf(upgrade.getTasks(), entry.getKey()));
-//
-//      // check that the number of components matches
-//      assertEquals(entry.getValue().size(), upgrade.getTasks().get(entry.getKey()).size());
-//
-//      // check component ordering
-//      int j = 0;
-//      for (String comp : entry.getValue()) {
-//        assertEquals(j++, indexOf(upgrade.getTasks().get(entry.getKey()), comp));
-//      }
-//    }
-//
-//    // !!! test specific tasks
-//    assertTrue(upgrade.getTasks().containsKey("HDFS"));
-//    assertTrue(upgrade.getTasks().get("HDFS").containsKey("NAMENODE"));
-//
-//    ProcessingComponent pc = upgrade.getTasks().get("HDFS").get("NAMENODE");
-//    assertNotNull(pc.preTasks);
-//    assertNotNull(pc.postTasks);
-//    assertNotNull(pc.tasks);
-//    assertNull(pc.preDowngradeTasks);
-//    assertNull(pc.postDowngradeTasks);
-//    assertEquals(1, pc.tasks.size());
-//
-//    assertEquals(Task.Type.RESTART, pc.tasks.get(0).getType());
-//    assertEquals(RestartTask.class, pc.tasks.get(0).getClass());
-//
-//
-//    assertTrue(upgrade.getTasks().containsKey("ZOOKEEPER"));
-//    assertTrue(upgrade.getTasks().get("ZOOKEEPER").containsKey("ZOOKEEPER_SERVER"));
-//
-//    pc = upgrade.getTasks().get("HDFS").get("DATANODE");
-//    assertNotNull(pc.preDowngradeTasks);
-//    assertEquals(0, pc.preDowngradeTasks.size());
-//    assertNotNull(pc.postDowngradeTasks);
-//    assertEquals(1, pc.postDowngradeTasks.size());
-//
-//
-//    pc = upgrade.getTasks().get("ZOOKEEPER").get("ZOOKEEPER_SERVER");
-//    assertNotNull(pc.preTasks);
-//    assertEquals(1, pc.preTasks.size());
-//    assertNotNull(pc.postTasks);
-//    assertEquals(1, pc.postTasks.size());
-//    assertNotNull(pc.tasks);
-//    assertEquals(1, pc.tasks.size());
-//
-//    pc = upgrade.getTasks().get("YARN").get("NODEMANAGER");
-//    assertNotNull(pc.preTasks);
-//    assertEquals(2, pc.preTasks.size());
-//    Task t = pc.preTasks.get(1);
-//    assertEquals(ConfigureTask.class, t.getClass());
-//    ConfigureTask ct = (ConfigureTask) t;
-//    assertEquals("core-site", ct.getConfigType());
-//    assertEquals(4, ct.getTransfers().size());
-//
-//    /*
-//            <transfer operation="COPY" from-key="copy-key" to-key="copy-key-to" />
-//            <transfer operation="COPY" from-type="my-site" from-key="my-copy-key" to-key="my-copy-key-to" />
-//            <transfer operation="MOVE" from-key="move-key" to-key="move-key-to" />
-//            <transfer operation="DELETE" delete-key="delete-key">
-//              <keep-key>important-key</keep-key>
-//            </transfer>
-//    */
-//    Transfer t1 = ct.getTransfers().get(0);
-//    assertEquals(TransferOperation.COPY, t1.operation);
-//    assertEquals("copy-key", t1.fromKey);
-//    assertEquals("copy-key-to", t1.toKey);
-//
-//    Transfer t2 = ct.getTransfers().get(1);
-//    assertEquals(TransferOperation.COPY, t2.operation);
-//    assertEquals("my-site", t2.fromType);
-//    assertEquals("my-copy-key", t2.fromKey);
-//    assertEquals("my-copy-key-to", t2.toKey);
-//    assertTrue(t2.keepKeys.isEmpty());
-//
-//    Transfer t3 = ct.getTransfers().get(2);
-//    assertEquals(TransferOperation.MOVE, t3.operation);
-//    assertEquals("move-key", t3.fromKey);
-//    assertEquals("move-key-to", t3.toKey);
-//
-//    Transfer t4 = ct.getTransfers().get(3);
-//    assertEquals(TransferOperation.DELETE, t4.operation);
-//    assertEquals("delete-key", t4.deleteKey);
-//    assertNull(t4.toKey);
-//    assertTrue(t4.preserveEdits);
-//    assertEquals(1, t4.keepKeys.size());
-//    assertEquals("important-key", t4.keepKeys.get(0));
-//  }
+  @Test
+  public void testUpgradeParsing() throws Exception {
+    Map<String, UpgradePack> upgrades = ambariMetaInfo.getUpgradePacks("HDP", "2.1.1");
+    assertTrue(upgrades.size() > 0);
+    assertTrue(upgrades.containsKey("upgrade_test"));
+    UpgradePack upgrade = upgrades.get("upgrade_test");
+    assertEquals("2.2.*.*", upgrade.getTarget());
+
+    Map<String, List<String>> expectedStages = new LinkedHashMap<String, List<String>>() {{
+      put("ZOOKEEPER", Arrays.asList("ZOOKEEPER_SERVER"));
+      put("HDFS", Arrays.asList("NAMENODE", "DATANODE"));
+    }};
+
+    // !!! test the tasks
+    int i = 0;
+    for (Entry<String, List<String>> entry : expectedStages.entrySet()) {
+      assertTrue(upgrade.getTasks().containsKey(entry.getKey()));
+      assertEquals(i++, indexOf(upgrade.getTasks(), entry.getKey()));
+
+      // check that the number of components matches
+      assertEquals(entry.getValue().size(), upgrade.getTasks().get(entry.getKey()).size());
+
+      // check component ordering
+      int j = 0;
+      for (String comp : entry.getValue()) {
+        assertEquals(j++, indexOf(upgrade.getTasks().get(entry.getKey()), comp));
+      }
+    }
+
+    // !!! test specific tasks
+    assertTrue(upgrade.getTasks().containsKey("HDFS"));
+    assertTrue(upgrade.getTasks().get("HDFS").containsKey("NAMENODE"));
+
+    ProcessingComponent pc = upgrade.getTasks().get("HDFS").get("NAMENODE");
+    assertNotNull(pc.preTasks);
+    assertNotNull(pc.postTasks);
+    assertNotNull(pc.tasks);
+    assertNull(pc.preDowngradeTasks);
+    assertNull(pc.postDowngradeTasks);
+    assertEquals(1, pc.tasks.size());
+
+    assertEquals(Task.Type.RESTART, pc.tasks.get(0).getType());
+    assertEquals(RestartTask.class, pc.tasks.get(0).getClass());
+
+
+    assertTrue(upgrade.getTasks().containsKey("ZOOKEEPER"));
+    assertTrue(upgrade.getTasks().get("ZOOKEEPER").containsKey("ZOOKEEPER_SERVER"));
+
+    pc = upgrade.getTasks().get("HDFS").get("DATANODE");
+    assertNotNull(pc.preDowngradeTasks);
+    assertEquals(0, pc.preDowngradeTasks.size());
+    assertNotNull(pc.postDowngradeTasks);
+    assertEquals(1, pc.postDowngradeTasks.size());
+
+
+    pc = upgrade.getTasks().get("ZOOKEEPER").get("ZOOKEEPER_SERVER");
+    assertNotNull(pc.preTasks);
+    assertEquals(1, pc.preTasks.size());
+    assertNotNull(pc.postTasks);
+    assertEquals(1, pc.postTasks.size());
+    assertNotNull(pc.tasks);
+    assertEquals(1, pc.tasks.size());
+
+    pc = upgrade.getTasks().get("YARN").get("NODEMANAGER");
+    assertNotNull(pc.preTasks);
+    assertEquals(2, pc.preTasks.size());
+    Task t = pc.preTasks.get(1);
+    assertEquals(ConfigureTask.class, t.getClass());
+    ConfigureTask ct = (ConfigureTask) t;
+    // check that the Configure task successfully parsed its id
+    assertEquals("hdp_2_1_1_nm_pre_upgrade", ct.getId());
+  }
 
   @Test
   public void testGroupOrdersForRolling() {

http://git-wip-us.apache.org/repos/asf/ambari/blob/965369f9/ambari-server/src/test/resources/stacks/HDP/2.1.1/upgrades/config-upgrade.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/stacks/HDP/2.1.1/upgrades/config-upgrade.xml b/ambari-server/src/test/resources/stacks/HDP/2.1.1/upgrades/config-upgrade.xml
new file mode 100644
index 0000000..1301f9d
--- /dev/null
+++ b/ambari-server/src/test/resources/stacks/HDP/2.1.1/upgrades/config-upgrade.xml
@@ -0,0 +1,101 @@
+<?xml version="1.0"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+
+<upgrade-config-changes xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
+
+  <services>
+    <service name="ZOOKEEPER">
+      <component name="ZOOKEEPER_SERVER">
+        <changes>
+          <definition xsi:type="configure" id="hdp_2_1_1_zk_post_upgrade">
+          </definition>
+        </changes>
+      </component>
+    </service>
+
+    <service name="HDFS">
+      <component name="NAMENODE">
+        <changes>
+          <definition xsi:type="configure" id="hdp_2_1_1_nn_pre_upgrade">
+            <type>hdfs-site</type>
+            <set key="myproperty" value="mynewvalue"/>
+          </definition>
+        </changes>
+      </component>
+    </service>
+
+    <service name="YARN">
+      <component name="NODEMANAGER">
+        <changes>
+          <definition xsi:type="configure" id="hdp_2_1_1_nm_pre_upgrade">
+            <type>core-site</type>
+            <transfer operation="copy" from-key="copy-key"
+                      to-key="copy-key-to"/>
+            <transfer operation="copy" from-type="my-site"
+                      from-key="my-copy-key"
+                      to-key="my-copy-key-to"/>
+            <transfer operation="move" from-key="move-key"
+                      to-key="move-key-to"/>
+            <transfer operation="delete" delete-key="delete-key"
+                      preserve-edits="true">
+              <keep-key>important-key</keep-key>
+            </transfer>
+          </definition>
+        </changes>
+      </component>
+    </service>
+
+    <service name="HIVE">
+      <component name="HIVE_SERVER">
+        <changes>
+          <definition xsi:type="configure" id="hdp_2_1_1_set_transport_mode">
+            <condition type="hive-site" key="hive.server2.transport.mode" value="binary">
+              <type>hive-site</type>
+              <key>hive.server2.thrift.port</key>
+              <value>10010</value>
+            </condition>
+            <condition type="hive-site" key="hive.server2.transport.mode" value="http">
+              <type>hive-site</type>
+              <key>hive.server2.http.port</key>
+              <value>10011</value>
+            </condition>
+          </definition>
+
+          <definition xsi:type="configure" id="hdp_2_1_1_hive_server_foo">
+            <type>hive-site</type>
+            <set key="fooKey" value="fooValue"/>
+            <set key="fooKey2" value="fooValue2"/>
+            <set key="fooKey3" value="fooValue3"/>
+            <transfer operation="copy" from-key="copy-key" to-key="copy-key-to" />
+            <transfer operation="move" from-key="move-key" to-key="move-key-to" />
+            <transfer operation="delete" delete-key="delete-key" />
+            <transfer operation="delete" delete-key="delete-http" if-key="hive.server2.transport.mode" if-type="hive-site" if-value="http" />
+            <transfer operation="delete" delete-key="delete-https-fail" if-key="hive.server2.transport.mode" if-type="hive-site" if-value="https" />
+            <transfer operation="delete" delete-key="delete-prop-fail" if-key="non.existent" if-type="hive-site" if-value="https" />
+            <transfer operation="delete" delete-key="delete-type-fail" if-key="non.existent" if-type="non.existent" if-value="" />
+            <transfer operation="delete" delete-key="delete-null-if-value" if-key="non.existent" if-type="non.existent" />
+            <transfer operation="delete" delete-key="delete-blank-if-key" if-key="" if-type="non.existent" />
+            <transfer operation="delete" delete-key="delete-blank-if-type" if-key="non.existent" if-type="" />
+            <transfer operation="delete" delete-key="delete-thrift" if-key="hive.server2.thrift.port" if-type="hive-site" if-value="10001" />
+          </definition>
+        </changes>
+      </component>
+    </service>
+  </services>
+
+</upgrade-config-changes>

http://git-wip-us.apache.org/repos/asf/ambari/blob/965369f9/ambari-server/src/test/resources/stacks/HDP/2.1.1/upgrades/upgrade_test.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/stacks/HDP/2.1.1/upgrades/upgrade_test.xml b/ambari-server/src/test/resources/stacks/HDP/2.1.1/upgrades/upgrade_test.xml
index 922fb53..827348a 100644
--- a/ambari-server/src/test/resources/stacks/HDP/2.1.1/upgrades/upgrade_test.xml
+++ b/ambari-server/src/test/resources/stacks/HDP/2.1.1/upgrades/upgrade_test.xml
@@ -130,7 +130,7 @@
           <task xsi:type="restart-task" />
         </upgrade>
         <post-upgrade>
-          <task xsi:type="configure" />
+          <task xsi:type="configure" id="2.2.0" />
         </post-upgrade>
       </component>
     </service>
@@ -141,10 +141,7 @@
           <task xsi:type="execute" hosts="master">
             <command>su - {hdfs-user} -c 'dosomething'</command>
           </task>
-          <task xsi:type="configure">
-            <type>hdfs-site</type>
-            <set key="myproperty" value="mynewvalue"/>
-          </task>
+          <task xsi:type="configure" id="hdp_2_1_1_nn_pre_upgrade" />
           <task xsi:type="manual">
             <message>{{direction.verb.proper}} your database</message>
           </task>
@@ -184,15 +181,7 @@
           <task xsi:type="execute">
             <command>ls</command>
           </task>
-          <task xsi:type="configure">
-            <type>core-site</type>
-            <transfer operation="copy" from-key="copy-key" to-key="copy-key-to" />
-            <transfer operation="copy" from-type="my-site" from-key="my-copy-key" to-key="my-copy-key-to" />
-            <transfer operation="move" from-key="move-key" to-key="move-key-to" />
-            <transfer operation="delete" delete-key="delete-key" preserve-edits="true">
-            <keep-key>important-key</keep-key>
-            </transfer>
-          </task>
+          <task xsi:type="configure" id="hdp_2_1_1_nm_pre_upgrade"/>
         </pre-upgrade>
       </component>
     </service>
@@ -205,36 +194,10 @@
             <message>The HiveServer port will now change to 10010 if hive is using a binary transfer mode or 10011 if hive is using an http transport mode. You can use "netstat -anp | grep 1001[01]" to determine if the port is available on each of following HiveServer host(s): {{hosts.all}}. If the port is not available, the process using it must be terminated.</message>
           </task>
 
-          <task xsi:type="configure">
-            <condition type="hive-site" key="hive.server2.transport.mode" value="binary">
-              <type>hive-site</type>
-              <key>hive.server2.thrift.port</key>
-              <value>10010</value>
-            </condition>
-            <condition type="hive-site" key="hive.server2.transport.mode" value="http">
-              <type>hive-site</type>
-              <key>hive.server2.http.port</key>
-              <value>10011</value>
-            </condition>
-          </task>
-          
-          <task xsi:type="configure">
-            <type>hive-site</type>
-            <set key="fooKey" value="fooValue"/>
-            <set key="fooKey2" value="fooValue2"/>
-            <set key="fooKey3" value="fooValue3"/>
-            <transfer operation="copy" from-key="copy-key" to-key="copy-key-to" />
-            <transfer operation="move" from-key="move-key" to-key="move-key-to" />
-            <transfer operation="delete" delete-key="delete-key" />
-            <transfer operation="delete" delete-key="delete-http" if-key="hive.server2.transport.mode" if-type="hive-site" if-value="http" />
-            <transfer operation="delete" delete-key="delete-https-fail" if-key="hive.server2.transport.mode" if-type="hive-site" if-value="https" />
-            <transfer operation="delete" delete-key="delete-prop-fail" if-key="non.existent" if-type="hive-site" if-value="https" />
-            <transfer operation="delete" delete-key="delete-type-fail" if-key="non.existent" if-type="non.existent" if-value="" />
-            <transfer operation="delete" delete-key="delete-null-if-value" if-key="non.existent" if-type="non.existent" />
-            <transfer operation="delete" delete-key="delete-blank-if-key" if-key="" if-type="non.existent" />
-            <transfer operation="delete" delete-key="delete-blank-if-type" if-key="non.existent" if-type="" />
-            <transfer operation="delete" delete-key="delete-thrift" if-key="hive.server2.thrift.port" if-type="hive-site" if-value="10001" />
-          </task>
+          <task xsi:type="configure" id="hdp_2_1_1_set_transport_mode"/>
+
+          <task xsi:type="configure" id="hdp_2_1_1_hive_server_foo"/>
+
         </pre-upgrade>
        </component>
      </service>    

http://git-wip-us.apache.org/repos/asf/ambari/blob/965369f9/ambari-server/src/test/resources/stacks/HDP/2.1.1/upgrades/upgrade_test_checks.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/stacks/HDP/2.1.1/upgrades/upgrade_test_checks.xml b/ambari-server/src/test/resources/stacks/HDP/2.1.1/upgrades/upgrade_test_checks.xml
index f64705e..b89946c 100644
--- a/ambari-server/src/test/resources/stacks/HDP/2.1.1/upgrades/upgrade_test_checks.xml
+++ b/ambari-server/src/test/resources/stacks/HDP/2.1.1/upgrades/upgrade_test_checks.xml
@@ -135,7 +135,7 @@
           <task xsi:type="restart-task" />
         </upgrade>
         <post-upgrade>
-          <task xsi:type="configure" />
+          <task xsi:type="configure" id="hdp_2_1_1_zk_post_upgrade"/>
         </post-upgrade>
       </component>
     </service>
@@ -145,10 +145,7 @@
           <task xsi:type="execute" hosts="master">
             <command>su - {hdfs-user} -c 'dosomething'</command>
           </task>
-          <task xsi:type="configure">
-            <type>hdfs-site</type>
-            <set key="myproperty" value="mynewvalue"/>
-          </task>
+          <task xsi:type="configure" id="hdp_2_1_1_nn_pre_upgrade"/>
           <task xsi:type="manual">
             <message>Update your database</message>
           </task>

http://git-wip-us.apache.org/repos/asf/ambari/blob/965369f9/ambari-server/src/test/resources/stacks/HDP/2.2.0/upgrades/config-upgrade.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/stacks/HDP/2.2.0/upgrades/config-upgrade.xml b/ambari-server/src/test/resources/stacks/HDP/2.2.0/upgrades/config-upgrade.xml
new file mode 100644
index 0000000..90d64b4
--- /dev/null
+++ b/ambari-server/src/test/resources/stacks/HDP/2.2.0/upgrades/config-upgrade.xml
@@ -0,0 +1,101 @@
+<?xml version="1.0"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+
+<upgrade-config-changes xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
+
+  <services>
+    <service name="ZOOKEEPER">
+      <component name="ZOOKEEPER_SERVER">
+        <changes>
+          <definition xsi:type="configure" id="hdp_2_2_0_zk_post_upgrade">
+          </definition>
+        </changes>
+      </component>
+    </service>
+
+    <service name="HDFS">
+      <component name="NAMENODE">
+        <changes>
+          <definition xsi:type="configure" id="hdp_2_2_0_nn_pre_upgrade">
+            <type>hdfs-site</type>
+            <set key="myproperty" value="mynewvalue"/>
+          </definition>
+        </changes>
+      </component>
+    </service>
+
+    <service name="YARN">
+      <component name="NODEMANAGER">
+        <changes>
+          <definition xsi:type="configure" id="hdp_2_2_0_nm_pre_upgrade">
+            <type>core-site</type>
+            <transfer operation="copy" from-key="copy-key"
+                      to-key="copy-key-to"/>
+            <transfer operation="copy" from-type="my-site"
+                      from-key="my-copy-key"
+                      to-key="my-copy-key-to"/>
+            <transfer operation="move" from-key="move-key"
+                      to-key="move-key-to"/>
+            <transfer operation="delete" delete-key="delete-key"
+                      preserve-edits="true">
+              <keep-key>important-key</keep-key>
+            </transfer>
+          </definition>
+        </changes>
+      </component>
+    </service>
+
+    <service name="HIVE">
+      <component name="HIVE_SERVER">
+        <changes>
+          <definition xsi:type="configure" id="hdp_2_2_0_set_transport_mode">
+            <condition type="hive-site" key="hive.server2.transport.mode" value="binary">
+              <type>hive-site</type>
+              <key>hive.server2.thrift.port</key>
+              <value>10010</value>
+            </condition>
+            <condition type="hive-site" key="hive.server2.transport.mode" value="http">
+              <type>hive-site</type>
+              <key>hive.server2.http.port</key>
+              <value>10011</value>
+            </condition>
+          </definition>
+
+          <definition xsi:type="configure" id="hdp_2_2_0_hive_server_foo">
+            <type>hive-site</type>
+            <set key="fooKey" value="fooValue"/>
+            <set key="fooKey2" value="fooValue2"/>
+            <set key="fooKey3" value="fooValue3"/>
+            <transfer operation="copy" from-key="copy-key" to-key="copy-key-to" />
+            <transfer operation="move" from-key="move-key" to-key="move-key-to" />
+            <transfer operation="delete" delete-key="delete-key" />
+            <transfer operation="delete" delete-key="delete-http" if-key="hive.server2.transport.mode" if-type="hive-site" if-value="http" />
+            <transfer operation="delete" delete-key="delete-https-fail" if-key="hive.server2.transport.mode" if-type="hive-site" if-value="https" />
+            <transfer operation="delete" delete-key="delete-prop-fail" if-key="non.existent" if-type="hive-site" if-value="https" />
+            <transfer operation="delete" delete-key="delete-type-fail" if-key="non.existent" if-type="non.existent" if-value="" />
+            <transfer operation="delete" delete-key="delete-null-if-value" if-key="non.existent" if-type="non.existent" />
+            <transfer operation="delete" delete-key="delete-blank-if-key" if-key="" if-type="non.existent" />
+            <transfer operation="delete" delete-key="delete-blank-if-type" if-key="non.existent" if-type="" />
+            <transfer operation="delete" delete-key="delete-thrift" if-key="hive.server2.thrift.port" if-type="hive-site" if-value="10001" />
+          </definition>
+        </changes>
+      </component>
+    </service>
+  </services>
+
+</upgrade-config-changes>

http://git-wip-us.apache.org/repos/asf/ambari/blob/965369f9/ambari-server/src/test/resources/stacks/HDP/2.2.0/upgrades/upgrade_test_checks.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/stacks/HDP/2.2.0/upgrades/upgrade_test_checks.xml b/ambari-server/src/test/resources/stacks/HDP/2.2.0/upgrades/upgrade_test_checks.xml
index 6c50d7b..14c68be 100644
--- a/ambari-server/src/test/resources/stacks/HDP/2.2.0/upgrades/upgrade_test_checks.xml
+++ b/ambari-server/src/test/resources/stacks/HDP/2.2.0/upgrades/upgrade_test_checks.xml
@@ -17,7 +17,7 @@
 -->
 <upgrade xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
   <target>2.2.*.*</target>
-  <target-stack>HDP-2.2.0</target-stack>
+  <target-stack>HDP-2.2.1</target-stack>
   <type>ROLLING</type>
   <prerequisite-checks>
     <check>org.apache.ambari.server.checks.HiveMultipleMetastoreCheck</check>
@@ -141,7 +141,7 @@
           <task xsi:type="restart-task" />
         </upgrade>
         <post-upgrade>
-          <task xsi:type="configure" />
+          <task xsi:type="configure" id="hdp_2_2_0_zk_post_upgrade"/>
         </post-upgrade>
       </component>
     </service>
@@ -151,10 +151,7 @@
           <task xsi:type="execute" hosts="master">
             <command>su - {hdfs-user} -c 'dosomething'</command>
           </task>
-          <task xsi:type="configure">
-            <type>hdfs-site</type>
-            <set key="myproperty" value="mynewvalue"/>
-          </task>
+          <task xsi:type="configure" id="hdp_2_2_0_nn_pre_upgrade"/>
           <task xsi:type="manual">
             <message>Update your database</message>
           </task>