Posted to commits@ambari.apache.org by dm...@apache.org on 2016/06/06 13:51:09 UTC

[2/2] ambari git commit: revert broken parts of "AMBARI-16272. Ambari Upgrade shouldn't automatically add stack configs (dlysnichenko)"

revert broken parts of "AMBARI-16272. Ambari Upgrade shouldn't automatically add stack configs (dlysnichenko)"


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/bc9097b1
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/bc9097b1
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/bc9097b1

Branch: refs/heads/trunk
Commit: bc9097b146a77a833183cd3d560268644964e87f
Parents: 2c08fa6
Author: Lisnichenko Dmitro <dl...@hortonworks.com>
Authored: Mon Jun 6 14:32:10 2016 +0300
Committer: Lisnichenko Dmitro <dl...@hortonworks.com>
Committed: Mon Jun 6 16:50:41 2016 +0300

----------------------------------------------------------------------
 .../internal/UpgradeResourceProvider.java       | 118 +++++++++++------
 .../ambari/server/state/ConfigHelper.java       |  14 +-
 .../server/upgrade/AbstractUpgradeCatalog.java  |  43 ++----
 .../UpgradeResourceProviderHDP22Test.java       |   2 +-
 .../internal/UpgradeResourceProviderTest.java   | 131 ++++++++-----------
 5 files changed, 158 insertions(+), 150 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/bc9097b1/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UpgradeResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UpgradeResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UpgradeResourceProvider.java
index b2c21b9..fb3ae69 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UpgradeResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UpgradeResourceProvider.java
@@ -82,10 +82,9 @@ import org.apache.ambari.server.serveraction.upgrades.UpdateDesiredStackAction;
 import org.apache.ambari.server.stack.MasterHostResolver;
 import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.Clusters;
+import org.apache.ambari.server.state.Config;
 import org.apache.ambari.server.state.ConfigHelper;
 import org.apache.ambari.server.state.DesiredConfig;
-import org.apache.ambari.server.state.PropertyInfo;
-import org.apache.ambari.server.state.PropertyUpgradeBehavior;
 import org.apache.ambari.server.state.Service;
 import org.apache.ambari.server.state.ServiceComponent;
 import org.apache.ambari.server.state.ServiceInfo;
@@ -991,7 +990,7 @@ public class UpgradeResourceProvider extends AbstractControllerResourceProvider
    * @throws AmbariException
    */
   public void applyStackAndProcessConfigurations(String stackName, Cluster cluster, String version, Direction direction, UpgradePack upgradePack, String userName)
-      throws AmbariException {
+    throws AmbariException {
     RepositoryVersionEntity targetRve = s_repoVersionDAO.findByStackNameAndVersion(stackName, version);
     if (null == targetRve) {
       LOG.info("Could not find version entity for {}; not setting new configs", version);
@@ -1023,12 +1022,18 @@ public class UpgradeResourceProvider extends AbstractControllerResourceProvider
         break;
     }
 
-    Map<String, Map<String, String>> updatedConfigurationsByType = null;
+    Map<String, Map<String, String>> newConfigurationsByType = null;
     ConfigHelper configHelper = getManagementController().getConfigHelper();
 
     if (direction == Direction.UPGRADE) {
+      // populate a map of default configurations for the old stack (this is
+      // used when determining if a property has been customized and should be
+      // overridden with the new stack value)
+      Map<String, Map<String, String>> oldStackDefaultConfigurationsByType = configHelper.getDefaultProperties(
+          currentStackId, cluster);
+
       // populate a map with default configurations from the new stack
-      Map<String, Set<PropertyInfo>> newConfigurationsByType = configHelper.getDefaultProperties(targetStackId, cluster);
+      newConfigurationsByType = configHelper.getDefaultProperties(targetStackId, cluster);
 
       // We want to skip updating config-types of services that are not in the upgrade pack.
       // Care should be taken as some config-types could be in services that are in and out
@@ -1078,52 +1083,83 @@ public class UpgradeResourceProvider extends AbstractControllerResourceProvider
         }
       }
 
-      // overlay new configuration on top of existing configurations
-
-      // create map of existing configurations
-      Map<String, Map<String, String>> existingPropertiesByType = new HashMap<>();
+      // now that the map has been populated with the default configurations
+      // from the stack/service, overlay the existing configurations on top
       Map<String, DesiredConfig> existingDesiredConfigurationsByType = cluster.getDesiredConfigs();
-      for (String configurationType : existingDesiredConfigurationsByType.keySet()) {
-        if (skipConfigTypes.contains(configurationType)) {
-          LOG.info("RU: Skipping config-configurationType {} as upgrade-pack contains no updates to its service", configurationType);
+      for (Map.Entry<String, DesiredConfig> existingEntry : existingDesiredConfigurationsByType.entrySet()) {
+        String configurationType = existingEntry.getKey();
+        if(skipConfigTypes.contains(configurationType)) {
+          LOG.info("RU: Skipping config-type {} as upgrade-pack contains no updates to its service", configurationType);
           continue;
         }
-        Map<String, String> existingProperties = cluster.getDesiredConfigByType(configurationType).getProperties();
-        existingPropertiesByType.put(configurationType, existingProperties);
-      }
 
-      // for every existing configuration, see if an entry exists:
-      // - if it does not exist and marked as ADD_ON_UPGRADE, then add it
-      // - if it does exist and marked as DELETE_ON_UPGRADE, then remove it
-      // - if it does exist and marked as CHANGE_ON_UPGRADE, then update it
-      for (Map.Entry<String, Set<PropertyInfo>> newPropertyEntry : newConfigurationsByType.entrySet()) {
-
-        String configurationType = newPropertyEntry.getKey();
-        Map<String, String> existingProperties = existingPropertiesByType.get(configurationType);
-        if (existingProperties == null) {
-          existingProperties = new HashMap<>();
-          existingPropertiesByType.put(configurationType, existingProperties);
+        // NPE sanity, although shouldn't even happen since we are iterating
+        // over the desired configs to start with
+        Config currentClusterConfig = cluster.getDesiredConfigByType(configurationType);
+        if (null == currentClusterConfig) {
+          continue;
         }
 
-        for (PropertyInfo propertyInfo : newPropertyEntry.getValue()) {
+        // get the existing configurations
+        Map<String, String> existingConfigurations = currentClusterConfig.getProperties();
 
-          String propertyName = propertyInfo.getName();
-          String propertyValue = propertyInfo.getValue();
-          PropertyUpgradeBehavior propertyStackUpgradeBehavior = propertyInfo.getPropertyStackUpgradeBehavior();
+        // if the new stack configurations don't have the type, then simply add
+        // all of the existing properties in
+        Map<String, String> newDefaultConfigurations = newConfigurationsByType.get(
+            configurationType);
+        if (null == newDefaultConfigurations) {
+          newConfigurationsByType.put(configurationType, existingConfigurations);
+          continue;
+        } else {
+          // TODO, should we remove existing configs whose value is NULL even though they don't have a value in the new stack?
+
+          // Remove any configs in the new stack whose value is NULL, unless they currently exist and the value is not NULL.
+          Iterator<Map.Entry<String, String>> iter = newDefaultConfigurations.entrySet().iterator();
+          while (iter.hasNext()) {
+            Map.Entry<String, String> entry = iter.next();
+            if (entry.getValue() == null) {
+              iter.remove();
+            }
+          }
+        }
 
-          if (!existingProperties.containsKey(propertyName) && propertyStackUpgradeBehavior.isAdd()) {
-            existingProperties.put(propertyName, propertyValue);
-          } else if (existingProperties.containsKey(propertyName)) {
-            if (propertyStackUpgradeBehavior.isDelete()) {
-              existingProperties.remove(propertyName);
-            } else if (propertyStackUpgradeBehavior.isChange()) {
-              existingProperties.put(propertyName, propertyValue);
+        // for every existing configuration, see if an entry exists; if it does
+        // not exist, then put it in the map, otherwise we'll have to compare
+        // the existing value to the original stack value to see if it's been
+        // customized
+        for (Map.Entry<String, String> existingConfigurationEntry : existingConfigurations.entrySet()) {
+          String existingConfigurationKey = existingConfigurationEntry.getKey();
+          String existingConfigurationValue = existingConfigurationEntry.getValue();
+
+          // if there is already an entry, we now have to try to determine if
+          // the value was customized after stack installation
+          if (newDefaultConfigurations.containsKey(existingConfigurationKey)) {
+            String newDefaultConfigurationValue = newDefaultConfigurations.get(
+                existingConfigurationKey);
+            if (!StringUtils.equals(existingConfigurationValue, newDefaultConfigurationValue)) {
+              // the new default is different from the existing cluster value;
+              // only override the default value if the existing value differs
+              // from the original stack
+              Map<String, String> configurationTypeDefaultConfigurations = oldStackDefaultConfigurationsByType.get(
+                  configurationType);
+              if (null != configurationTypeDefaultConfigurations) {
+                String oldDefaultValue = configurationTypeDefaultConfigurations.get(
+                    existingConfigurationKey);
+                if (!StringUtils.equals(existingConfigurationValue, oldDefaultValue)) {
+                  // at this point, we've determined that there is a difference
+                  // between default values between stacks, but the value was
+                  // also customized, so keep the customized value
+                  newDefaultConfigurations.put(existingConfigurationKey,
+                      existingConfigurationValue);
+                }
+              }
             }
+          } else {
+            // there is no entry in the map, so add the existing key/value pair
+            newDefaultConfigurations.put(existingConfigurationKey, existingConfigurationValue);
           }
         }
       }
-
-      updatedConfigurationsByType = existingPropertiesByType;
     } else {
       // downgrade
       cluster.applyLatestConfigurations(cluster.getCurrentStackVersion());
@@ -1134,8 +1170,8 @@ public class UpgradeResourceProvider extends AbstractControllerResourceProvider
         new StackId(targetStack.getStackName(), targetStack.getStackVersion()), true);
 
     // !!! configs must be created after setting the stack version
-    if (null != updatedConfigurationsByType) {
-      configHelper.createConfigTypes(cluster, getManagementController(), updatedConfigurationsByType,
+    if (null != newConfigurationsByType) {
+      configHelper.createConfigTypes(cluster, getManagementController(), newConfigurationsByType,
           userName, "Configuration created for Upgrade");
     }
   }
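
For readers following the hunk above: the restored merge behavior in
applyStackAndProcessConfigurations() can be summarized per config type.
Below is a minimal, self-contained sketch (class, method, and parameter
names are illustrative and not part of this commit) of how target-stack
defaults, source-stack defaults, and the cluster's existing values are
reconciled:

import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Objects;

/** Illustrative sketch only; not part of this patch. */
final class ConfigMergeSketch {

  /**
   * Reconciles one config type during a stack upgrade.
   *
   * @param newStackDefaults defaults of the target stack for this type
   * @param oldStackDefaults defaults of the source stack for this type,
   *                         or null if the source stack has no such type
   * @param existing         the cluster's current values for this type
   * @return the properties to create for the new stack version
   */
  static Map<String, String> merge(Map<String, String> newStackDefaults,
      Map<String, String> oldStackDefaults, Map<String, String> existing) {

    Map<String, String> merged = new HashMap<>(newStackDefaults);

    // drop target-stack properties that carry no default value
    Iterator<Map.Entry<String, String>> iterator = merged.entrySet().iterator();
    while (iterator.hasNext()) {
      if (iterator.next().getValue() == null) {
        iterator.remove();
      }
    }

    for (Map.Entry<String, String> entry : existing.entrySet()) {
      String key = entry.getKey();
      String existingValue = entry.getValue();

      if (!merged.containsKey(key)) {
        // the new stack does not know this property; carry it over as-is
        merged.put(key, existingValue);
      } else if (!Objects.equals(existingValue, merged.get(key))) {
        // the new default differs from the cluster value; keep the cluster
        // value only if it was customized away from the old-stack default
        // (when the old stack has no defaults for this type, the new
        // default wins, matching the patch above)
        if (oldStackDefaults != null
            && !Objects.equals(existingValue, oldStackDefaults.get(key))) {
          merged.put(key, existingValue);
        }
      }
    }

    return merged;
  }
}

With the fixture data from UpgradeResourceProviderTest further down, this
yields "1" -> "one-new" (stack default, not customized), "11" -> "one-one"
(carried over), and "111" -> "one-one-one" (new default) for foo-site, and
"3" -> "three-changed" (customization preserved) for baz-site, matching the
test assertions.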

http://git-wip-us.apache.org/repos/asf/ambari/blob/bc9097b1/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigHelper.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigHelper.java b/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigHelper.java
index 488603a..a3a2bda 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigHelper.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigHelper.java
@@ -1021,9 +1021,9 @@ public class ConfigHelper {
    *         default configurations.
    * @throws AmbariException
    */
-  public Map<String, Set<PropertyInfo>> getDefaultProperties(StackId stack, Cluster cluster)
+  public Map<String, Map<String, String>> getDefaultProperties(StackId stack, Cluster cluster)
       throws AmbariException {
-    Map<String, Set<PropertyInfo>> defaultPropertiesByType = new HashMap<String, Set<PropertyInfo>>();
+    Map<String, Map<String, String>> defaultPropertiesByType = new HashMap<String, Map<String, String>>();
 
     // populate the stack (non-service related) properties first
     Set<org.apache.ambari.server.state.PropertyInfo> stackConfigurationProperties = ambariMetaInfo.getStackProperties(
@@ -1033,10 +1033,11 @@ public class ConfigHelper {
       String type = ConfigHelper.fileNameToConfigType(stackDefaultProperty.getFilename());
 
       if (!defaultPropertiesByType.containsKey(type)) {
-        defaultPropertiesByType.put(type, new HashSet<PropertyInfo>());
+        defaultPropertiesByType.put(type, new HashMap<String, String>());
       }
 
-      defaultPropertiesByType.get(type).add(stackDefaultProperty);
+      defaultPropertiesByType.get(type).put(stackDefaultProperty.getName(),
+          stackDefaultProperty.getValue());
     }
 
     // for every installed service, populate the default service properties
@@ -1049,10 +1050,11 @@ public class ConfigHelper {
         String type = ConfigHelper.fileNameToConfigType(serviceDefaultProperty.getFilename());
 
         if (!defaultPropertiesByType.containsKey(type)) {
-          defaultPropertiesByType.put(type, new HashSet<PropertyInfo>());
+          defaultPropertiesByType.put(type, new HashMap<String, String>());
         }
 
-        defaultPropertiesByType.get(type).add(serviceDefaultProperty);
+        defaultPropertiesByType.get(type).put(serviceDefaultProperty.getName(),
+            serviceDefaultProperty.getValue());
       }
     }
 

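The ConfigHelper change above flattens the stack defaults from sets of
PropertyInfo objects into plain name/value maps keyed by config type. As a
rough illustration of the resulting shape (PropertySketch stands in for the
real PropertyInfo, and stripping the ".xml" suffix stands in for
ConfigHelper.fileNameToConfigType()):

import java.util.HashMap;
import java.util.List;
import java.util.Map;

/** Illustrative sketch only; not part of this patch. */
final class DefaultPropertiesSketch {

  /** Minimal stand-in for org.apache.ambari.server.state.PropertyInfo. */
  static final class PropertySketch {
    final String filename;
    final String name;
    final String value;

    PropertySketch(String filename, String name, String value) {
      this.filename = filename;
      this.name = name;
      this.value = value;
    }
  }

  /** Groups default properties as config type -> (property name -> value). */
  static Map<String, Map<String, String>> byType(List<PropertySketch> properties) {
    Map<String, Map<String, String>> defaultsByType =
        new HashMap<String, Map<String, String>>();

    for (PropertySketch property : properties) {
      // e.g. "hdfs-site.xml" -> "hdfs-site"
      String type = property.filename.replaceAll("\\.xml$", "");

      Map<String, String> typeDefaults = defaultsByType.get(type);
      if (typeDefaults == null) {
        typeDefaults = new HashMap<String, String>();
        defaultsByType.put(type, typeDefaults);
      }
      typeDefaults.put(property.name, property.value);
    }
    return defaultsByType;
  }
}

Callers such as UpgradeResourceProvider can then read a default directly,
for example defaults.get("hdfs-site").get("dfs.replication"), instead of
iterating PropertyInfo objects.
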
http://git-wip-us.apache.org/repos/asf/ambari/blob/bc9097b1/ambari-server/src/main/java/org/apache/ambari/server/upgrade/AbstractUpgradeCatalog.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/AbstractUpgradeCatalog.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/AbstractUpgradeCatalog.java
index f30bf40..3ee8bba 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/AbstractUpgradeCatalog.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/AbstractUpgradeCatalog.java
@@ -24,6 +24,7 @@ import java.sql.Statement;
 import java.util.Collections;
 import java.util.Comparator;
 import java.util.HashMap;
+import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Map.Entry;
@@ -309,7 +310,6 @@ public abstract class AbstractUpgradeCatalog implements UpgradeCatalog {
     if (clusterMap != null && !clusterMap.isEmpty()) {
       for (Cluster cluster : clusterMap.values()) {
         Map<String, Set<String>> newProperties = new HashMap<String, Set<String>>();
-        Map<String, Set<String>> overriddenProperties = new HashMap<String, Set<String>>();
 
         Set<PropertyInfo> stackProperties = configHelper.getStackProperties(cluster);
         for(String serviceName: cluster.getServices().keySet()) {
@@ -321,33 +321,21 @@ public abstract class AbstractUpgradeCatalog implements UpgradeCatalog {
           properties.addAll(stackProperties);
 
           for(PropertyInfo property:properties) {
-            if (property.getValue() == null) {
-              continue;
-            }
-
             String configType = ConfigHelper.fileNameToConfigType(property.getFilename());
             Config clusterConfigs = cluster.getDesiredConfigByType(configType);
-//            if (clusterConfigs == null || (!clusterConfigs.getProperties().containsKey(property.getName()) && property.getPropertyAmbariUpgradeBehavior().isAdd())) {
-//              LOG.info("Config " + property.getName() + " from " + configType + " from xml configurations" +
-//                      " will be added...");
-//              if (!newProperties.containsKey(configType)) {
-//                newProperties.put(configType, new HashSet<String>());
-//              }
-//              newProperties.get(configType).add(property.getName());
-//            } else if (clusterConfigs.getProperties().containsKey(property.getName())) {
-//              if (property.getPropertyAmbariUpgradeBehavior().isDelete()) {
-//                LOG.info("Config " + property.getName() + " from " + configType + " from xml configurations" +
-//                        " will be removed...");
-//                continue;
-//              } else if (property.getPropertyAmbariUpgradeBehavior().isChange()) {
-//                LOG.info("Config " + property.getName() + " from " + configType + " from xml configurations" +
-//                        " will be overridden...");
-//                if (!overriddenProperties.containsKey(configType)) {
-//                  overriddenProperties.put(configType, new HashSet<String>());
-//                }
-//                overriddenProperties.get(configType).add(property.getName());
-//              }
-//            }
+            if(clusterConfigs == null || !clusterConfigs.getProperties().containsKey(property.getName())) {
+              if (property.getValue() == null || property.getPropertyTypes().contains(PropertyInfo.PropertyType.DONT_ADD_ON_UPGRADE)) {
+                continue;
+              }
+
+              LOG.info("Config " + property.getName() + " from " + configType + " from xml configurations" +
+                  " is not found on the cluster. Adding it...");
+
+              if(!newProperties.containsKey(configType)) {
+                newProperties.put(configType, new HashSet<String>());
+              }
+              newProperties.get(configType).add(property.getName());
+            }
           }
         }
 
@@ -356,9 +344,6 @@ public abstract class AbstractUpgradeCatalog implements UpgradeCatalog {
         for (Entry<String, Set<String>> newProperty : newProperties.entrySet()) {
           updateConfigurationPropertiesWithValuesFromXml(newProperty.getKey(), newProperty.getValue(), false, true);
         }
-        for (Entry<String, Set<String>> overriddenProperty : overriddenProperties.entrySet()) {
-          updateConfigurationPropertiesWithValuesFromXml(overriddenProperty.getKey(), overriddenProperty.getValue(), true, true);
-        }
       }
     }
   }
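
The restored AbstractUpgradeCatalog logic above only adds stack properties
that are missing from the cluster, skipping properties that have no default
value or are flagged DONT_ADD_ON_UPGRADE; existing cluster values are never
changed or removed. A condensed, self-contained sketch of that selection
(type and method names are illustrative, and the DONT_ADD_ON_UPGRADE flag
is approximated here by a simple set of property names):

import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

/** Illustrative sketch only; not part of this patch. */
final class AddMissingPropertiesSketch {

  /**
   * @param stackDefaults    config type -> (property name -> default value)
   * @param clusterConfigs   config type -> (property name -> current value)
   * @param dontAddOnUpgrade property names that must not be added on upgrade
   * @return config type -> names of properties to add from the stack XML
   */
  static Map<String, Set<String>> collectMissing(
      Map<String, Map<String, String>> stackDefaults,
      Map<String, Map<String, String>> clusterConfigs,
      Set<String> dontAddOnUpgrade) {

    Map<String, Set<String>> newProperties = new HashMap<String, Set<String>>();

    for (Map.Entry<String, Map<String, String>> typeEntry : stackDefaults.entrySet()) {
      String configType = typeEntry.getKey();
      Map<String, String> clusterType = clusterConfigs.get(configType);

      for (Map.Entry<String, String> property : typeEntry.getValue().entrySet()) {
        boolean presentOnCluster = clusterType != null
            && clusterType.containsKey(property.getKey());
        if (presentOnCluster) {
          continue; // existing cluster values are left untouched
        }
        if (property.getValue() == null
            || dontAddOnUpgrade.contains(property.getKey())) {
          continue; // no default to add, or excluded from upgrade
        }

        Set<String> names = newProperties.get(configType);
        if (names == null) {
          names = new HashSet<String>();
          newProperties.put(configType, names);
        }
        names.add(property.getKey());
      }
    }
    return newProperties;
  }
}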

http://git-wip-us.apache.org/repos/asf/ambari/blob/bc9097b1/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UpgradeResourceProviderHDP22Test.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UpgradeResourceProviderHDP22Test.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UpgradeResourceProviderHDP22Test.java
index bc99117..c052a6c 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UpgradeResourceProviderHDP22Test.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UpgradeResourceProviderHDP22Test.java
@@ -124,7 +124,7 @@ public class UpgradeResourceProviderHDP22Test {
         "placeholder-rendered-properly").anyTimes();
 
     expect(configHelper.getDefaultProperties(EasyMock.anyObject(StackId.class), EasyMock.anyObject(Cluster.class))).andReturn(
-        new HashMap<String, Set<org.apache.ambari.server.state.PropertyInfo>>()).anyTimes();
+        new HashMap<String, Map<String, String>>()).anyTimes();
 
     expect(configHelper.getEffectiveConfigAttributes(EasyMock.anyObject(Cluster.class), EasyMock.anyObject(Map.class))).andReturn(
         new HashMap<String, Map<String, Map<String, String>>>()).anyTimes();

http://git-wip-us.apache.org/repos/asf/ambari/blob/bc9097b1/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UpgradeResourceProviderTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UpgradeResourceProviderTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UpgradeResourceProviderTest.java
index 67be4e5..5bcfd86 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UpgradeResourceProviderTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UpgradeResourceProviderTest.java
@@ -17,8 +17,6 @@
  */
 package org.apache.ambari.server.controller.internal;
 
-import static com.google.common.collect.ImmutableMap.of;
-import static com.google.common.collect.Sets.newHashSet;
 import static org.easymock.EasyMock.createNiceMock;
 import static org.easymock.EasyMock.expect;
 import static org.easymock.EasyMock.replay;
@@ -83,8 +81,6 @@ import org.apache.ambari.server.state.ConfigImpl;
 import org.apache.ambari.server.state.DesiredConfig;
 import org.apache.ambari.server.state.Host;
 import org.apache.ambari.server.state.HostState;
-import org.apache.ambari.server.state.PropertyInfo;
-import org.apache.ambari.server.state.PropertyUpgradeBehavior;
 import org.apache.ambari.server.state.RepositoryVersionState;
 import org.apache.ambari.server.state.Service;
 import org.apache.ambari.server.state.ServiceComponent;
@@ -146,7 +142,7 @@ public class UpgradeResourceProviderTest {
     expect(
         configHelper.getDefaultProperties(EasyMock.anyObject(StackId.class),
             EasyMock.anyObject(Cluster.class))).andReturn(
-        new HashMap<String, Set<PropertyInfo>>()).anyTimes();
+        new HashMap<String, Map<String, String>>()).anyTimes();
 
 
     EasyMock.replay(configHelper);
@@ -1092,57 +1088,59 @@ public class UpgradeResourceProviderTest {
     StackId stack211 = new StackId("HDP-2.1.1");
     StackId stack220 = new StackId("HDP-2.2.0");
 
-    Set<PropertyInfo> stack211FirstConfig = newHashSet(
-        createProperty("1", "one", new PropertyUpgradeBehavior(true,false,false)),
-        createProperty("11", "one-one", new PropertyUpgradeBehavior(true,false,false))
-    );
-    Set<PropertyInfo> stack211SecondConfig = newHashSet(createProperty("2", "two", new PropertyUpgradeBehavior(true,false,false)));
-    Set<PropertyInfo> stack211ThirdConfig = newHashSet(createProperty("3", "three", new PropertyUpgradeBehavior(true,false,false)));
-
-    Map<String, Set<PropertyInfo>> stack211Configs = new HashMap<>(of(
-        "first-site", stack211FirstConfig,
-        "second-site", stack211SecondConfig,
-        "third-site", stack211ThirdConfig
-    ));
-
-    Set<PropertyInfo> stack220FirstConfig = newHashSet(
-        createProperty("1", "one-new", new PropertyUpgradeBehavior(false,false,true)),
-        createProperty("11", "any", new PropertyUpgradeBehavior(false,true,false)),
-        createProperty("111", "one-one-one", new PropertyUpgradeBehavior(true,false,false)),
-        createProperty("2", "two", new PropertyUpgradeBehavior(false,false,true))
-    );
-    Set<PropertyInfo> stack220ThirdConfig = newHashSet(createProperty("3", "three-ignored", new PropertyUpgradeBehavior(false,false,false)));
-    Set<PropertyInfo> stack220FlumeEnvConfig = newHashSet(createProperty("flume_env_key", "flume-env-value", new PropertyUpgradeBehavior(false,false,false)));
-    Map<String, Set<PropertyInfo>> stack220Configs = new HashMap<>(of(
-        "first-site", stack220FirstConfig,
-        "third-site", stack220ThirdConfig,
-        "flume-env", stack220FlumeEnvConfig
-    ));
-
-    Map<String, String> firstConfigProperties = new HashMap<>(of("1", "one", "11", "one-one"));
-    Map<String, String> secondConfigProperties = new HashMap<>(of("2", "two"));
-    Map<String, String> thirdConfigProperties = new HashMap<>(of("3", "three-changed"));
-
-    Config firstConfig = EasyMock.createNiceMock(Config.class);
-    Config secondConfig = EasyMock.createNiceMock(Config.class);
-    Config thirdConfig = EasyMock.createNiceMock(Config.class);
-
-    expect(firstConfig.getProperties()).andReturn(firstConfigProperties);
-    expect(secondConfig.getProperties()).andReturn(secondConfigProperties);
-    expect(thirdConfig.getProperties()).andReturn(thirdConfigProperties);
-
-    Map<String, DesiredConfig> desiredConfigurations = new HashMap<>();
-    desiredConfigurations.put("first-site", null);
-    desiredConfigurations.put("second-site", null);
-    desiredConfigurations.put("third-site", null);
+    Map<String, Map<String, String>> stack211Configs = new HashMap<String, Map<String, String>>();
+    Map<String, String> stack211FooType = new HashMap<String, String>();
+    Map<String, String> stack211BarType = new HashMap<String, String>();
+    Map<String, String> stack211BazType = new HashMap<String, String>();
+    stack211Configs.put("foo-site", stack211FooType);
+    stack211Configs.put("bar-site", stack211BarType);
+    stack211Configs.put("baz-site", stack211BazType);
+    stack211FooType.put("1", "one");
+    stack211FooType.put("11", "one-one");
+    stack211BarType.put("2", "two");
+    stack211BazType.put("3", "three");
+
+    Map<String, Map<String, String>> stack220Configs = new HashMap<String, Map<String, String>>();
+    Map<String, String> stack220FooType = new HashMap<String, String>();
+    Map<String, String> stack220BazType = new HashMap<String, String>();
+    Map<String, String> stack220FlumeEnvType = new HashMap<String, String>();
+    stack220Configs.put("foo-site", stack220FooType);
+    stack220Configs.put("baz-site", stack220BazType);
+    stack220Configs.put("flume-env", stack220FlumeEnvType);
+    stack220FooType.put("1", "one-new");
+    stack220FooType.put("111", "one-one-one");
+    stack220BazType.put("3", "three-new");
+    stack220FlumeEnvType.put("flume_env_key", "flume-env-value");
+
+    Map<String, String> clusterFooType = new HashMap<String, String>();
+    Map<String, String> clusterBarType = new HashMap<String, String>();
+    Map<String, String> clusterBazType = new HashMap<String, String>();
+
+    Config fooConfig = EasyMock.createNiceMock(Config.class);
+    Config barConfig = EasyMock.createNiceMock(Config.class);
+    Config bazConfig = EasyMock.createNiceMock(Config.class);
+
+    clusterFooType.put("1", "one");
+    clusterFooType.put("11", "one-one");
+    clusterBarType.put("2", "two");
+    clusterBazType.put("3", "three-changed");
+
+    expect(fooConfig.getProperties()).andReturn(clusterFooType);
+    expect(barConfig.getProperties()).andReturn(clusterBarType);
+    expect(bazConfig.getProperties()).andReturn(clusterBazType);
+
+    Map<String, DesiredConfig> desiredConfigurations = new HashMap<String, DesiredConfig>();
+    desiredConfigurations.put("foo-site", null);
+    desiredConfigurations.put("bar-site", null);
+    desiredConfigurations.put("baz-site", null);
 
     Cluster cluster = EasyMock.createNiceMock(Cluster.class);
     expect(cluster.getCurrentStackVersion()).andReturn(stack211);
     expect(cluster.getDesiredStackVersion()).andReturn(stack220);
     expect(cluster.getDesiredConfigs()).andReturn(desiredConfigurations);
-    expect(cluster.getDesiredConfigByType("first-site")).andReturn(firstConfig);
-    expect(cluster.getDesiredConfigByType("second-site")).andReturn(secondConfig);
-    expect(cluster.getDesiredConfigByType("third-site")).andReturn(thirdConfig);
+    expect(cluster.getDesiredConfigByType("foo-site")).andReturn(fooConfig);
+    expect(cluster.getDesiredConfigByType("bar-site")).andReturn(barConfig);
+    expect(cluster.getDesiredConfigByType("baz-site")).andReturn(bazConfig);
 
     // setup the config helper for placeholder resolution
     EasyMock.reset(configHelper);
@@ -1164,7 +1162,7 @@ public class UpgradeResourceProviderTest {
 
     EasyMock.expectLastCall().once();
 
-    EasyMock.replay(configHelper, cluster, firstConfig, secondConfig, thirdConfig);
+    EasyMock.replay(configHelper, cluster, fooConfig, barConfig, bazConfig);
 
     UpgradeResourceProvider upgradeResourceProvider = createProvider(amc);
 
@@ -1173,22 +1171,21 @@ public class UpgradeResourceProviderTest {
     upgradeResourceProvider.applyStackAndProcessConfigurations(stack211.getStackName(), cluster, "2.2.0.0", Direction.UPGRADE, upgrade, "admin");
 
     Map<String, Map<String, String>> expectedConfigurations = expectedConfigurationsCapture.getValue();
-    Map<String, String> resultingFirstConfig = expectedConfigurations.get("first-site");
-    Map<String, String> resultingSecondConfig = expectedConfigurations.get("second-site");
-    Map<String, String> resultingThirdConfig = expectedConfigurations.get("third-site");
+    Map<String, String> expectedFooType = expectedConfigurations.get("foo-site");
+    Map<String, String> expectedBarType = expectedConfigurations.get("bar-site");
+    Map<String, String> expectedBazType = expectedConfigurations.get("baz-site");
 
     // As the upgrade pack did not have any Flume updates, its configs should not be updated.
     assertFalse(expectedConfigurations.containsKey("flume-env"));
 
     // the really important values are one-new and three-changed; one-new
-    // indicates that the new stack value is changed since it was marked CHANGE_ON_UPGRADE
+    // indicates that the new stack value takes effect since the old value was not customized
     // while three-changed represents that the customized value was preserved
     // even though the stack value changed
-    assertEquals("one-new", resultingFirstConfig.get("1"));
-    assertEquals(null, resultingFirstConfig.get("11"));
-    assertEquals("one-one-one", resultingFirstConfig.get("111"));
-    assertEquals("two", resultingSecondConfig.get("2"));
-    assertEquals("three-changed", resultingThirdConfig.get("3"));
+    assertEquals("one-new", expectedFooType.get("1"));
+    assertEquals("one-one", expectedFooType.get("11"));
+    assertEquals("two", expectedBarType.get("2"));
+    assertEquals("three-changed", expectedBazType.get("3"));
   }
 
   /**
@@ -1199,18 +1196,6 @@ public class UpgradeResourceProviderTest {
     return new UpgradeResourceProvider(amc);
   }
 
-  private PropertyInfo createProperty(String name, String value) {
-    return createProperty(name, value, null);
-  }
-
-  private PropertyInfo createProperty(String name, String value, PropertyUpgradeBehavior propertyStackUpgradeBehavior) {
-    PropertyInfo propertyInfo = new PropertyInfo();
-    propertyInfo.setName(name);
-    propertyInfo.setValue(value);
-    propertyInfo.setPropertyStackUpgradeBehavior(propertyStackUpgradeBehavior);
-    return propertyInfo;
-  }
-
   private RequestStatus testCreateResources() throws Exception {
 
     Cluster cluster = clusters.getCluster("c1");