Posted to commits@ambari.apache.org by nc...@apache.org on 2015/04/20 21:12:18 UTC

ambari git commit: AMBARI-10586. Add Prerequisite Check for Merged Configs (ncole)

Repository: ambari
Updated Branches:
  refs/heads/trunk 14b863738 -> 3d2684add


AMBARI-10586. Add Prerequisite Check for Merged Configs (ncole)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/3d2684ad
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/3d2684ad
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/3d2684ad

Branch: refs/heads/trunk
Commit: 3d2684addc4108cd7a3f6a4d429dd2a1511cd748
Parents: 14b8637
Author: Nate Cole <nc...@hortonworks.com>
Authored: Sat Apr 18 17:49:54 2015 -0400
Committer: Nate Cole <nc...@hortonworks.com>
Committed: Mon Apr 20 15:12:04 2015 -0400

----------------------------------------------------------------------
 .../ambari/server/checks/CheckDescription.java  |   7 +
 .../server/checks/ConfigurationMergeCheck.java  | 150 ++++++++++++
 .../checks/HostsRepositoryVersionCheck.java     |  57 +++--
 .../PreUpgradeCheckResourceProvider.java        |  68 +++---
 .../server/orm/dao/RepositoryVersionDAO.java    |  13 +
 .../orm/entities/RepositoryVersionEntity.java   |   3 +-
 .../ambari/server/state/ConfigMergeHelper.java  | 192 +++++++++++++++
 .../server/state/stack/PrereqCheckStatus.java   |   1 +
 .../checks/ConfigurationMergeCheckTest.java     | 237 +++++++++++++++++++
 .../checks/HostsRepositoryVersionCheckTest.java |  59 +++++
 10 files changed, 730 insertions(+), 57 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/3d2684ad/ambari-server/src/main/java/org/apache/ambari/server/checks/CheckDescription.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/checks/CheckDescription.java b/ambari-server/src/main/java/org/apache/ambari/server/checks/CheckDescription.java
index 0db235f..b00d6fb 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/checks/CheckDescription.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/checks/CheckDescription.java
@@ -121,6 +121,13 @@ public enum CheckDescription {
       new HashMap<String, String>() {{
         put(AbstractCheckDescriptor.DEFAULT,
           "YARN should have work preserving restart enabled. The yarn-site.xml property yarn.resourcemanager.work-preserving-recovery.enabled property should be set to true.");
+      }}),
+
+  CONFIG_MERGE(PrereqCheckType.CLUSTER,
+      "Configuration Merge Check",
+      new HashMap<String, String>() {{
+        put(AbstractCheckDescriptor.DEFAULT,
+          "The following config types will have values overwritten: %s");
       }});
 
   private PrereqCheckType m_type;

http://git-wip-us.apache.org/repos/asf/ambari/blob/3d2684ad/ambari-server/src/main/java/org/apache/ambari/server/checks/ConfigurationMergeCheck.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/checks/ConfigurationMergeCheck.java b/ambari-server/src/main/java/org/apache/ambari/server/checks/ConfigurationMergeCheck.java
new file mode 100644
index 0000000..bcce006
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/checks/ConfigurationMergeCheck.java
@@ -0,0 +1,150 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.server.checks;
+
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Set;
+
+import org.apache.ambari.server.AmbariException;
+import org.apache.ambari.server.controller.PrereqCheckRequest;
+import org.apache.ambari.server.orm.entities.RepositoryVersionEntity;
+import org.apache.ambari.server.state.Cluster;
+import org.apache.ambari.server.state.ConfigMergeHelper;
+import org.apache.ambari.server.state.ConfigMergeHelper.ThreeWayValue;
+import org.apache.ambari.server.state.stack.PrereqCheckStatus;
+import org.apache.ambari.server.state.stack.PrerequisiteCheck;
+import org.apache.ambari.server.utils.VersionUtils;
+import org.apache.commons.lang.StringUtils;
+
+import com.google.inject.Inject;
+
+/**
+ * Checks for configuration merge conflicts.
+ */
+public class ConfigurationMergeCheck extends AbstractCheckDescriptor {
+
+  @Inject
+  ConfigMergeHelper m_mergeHelper;
+
+  public ConfigurationMergeCheck() {
+    super(CheckDescription.CONFIG_MERGE);
+  }
+
+  @Override
+  public boolean isApplicable(PrereqCheckRequest request) throws AmbariException {
+    String repoVersion = request.getRepositoryVersion();
+    if (null == repoVersion) {
+      return false;
+    }
+
+    RepositoryVersionEntity rve = findByVersion(repoVersion);
+    if (null == rve) {
+      return false;
+    }
+
+    Cluster cluster = clustersProvider.get().getCluster(request.getClusterName());
+
+    if (rve.getStackId().equals(cluster.getCurrentStackVersion())) {
+      return false;
+    }
+
+    return true;
+  }
+
+
+  /**
+   * The following logic determines if a warning is generated for config merge
+   * issues:
+   * <ul>
+   *   <li>A value that has been customized from HDP 2.2.x.x no longer exists in HDP 2.3.x.x</li>
+   *   <li>A value that has been customized from HDP 2.2.x.x has changed its default value between HDP 2.2.x.x and HDP 2.3.x.x</li>
+   * </ul>
+   */
+  @Override
+  public void perform(PrerequisiteCheck prerequisiteCheck, PrereqCheckRequest request)
+      throws AmbariException {
+
+    RepositoryVersionEntity rve = findByVersion(request.getRepositoryVersion());
+
+    Map<String, Map<String, ThreeWayValue>> changes =
+        m_mergeHelper.getConflicts(request.getClusterName(), rve.getStackId());
+
+    Set<String> failedTypes = new HashSet<String>();
+
+    for (Entry<String, Map<String, ThreeWayValue>> entry : changes.entrySet()) {
+      for (Entry<String, ThreeWayValue> configEntry : entry.getValue().entrySet()) {
+
+        ThreeWayValue twv = configEntry.getValue();
+        if (null == twv.oldStackValue) { // !!! not defined in the old stack; being in the map means it changed
+          failedTypes.add(entry.getKey());
+          prerequisiteCheck.getFailedOn().add(entry.getKey() + "/" + configEntry.getKey());
+        } else if (!twv.oldStackValue.equals(twv.savedValue)) {  // !!! value customized
+          if (null == twv.newStackValue || // !!! not in new stack
+              !twv.oldStackValue.equals(twv.newStackValue)) { // !!! or the default value changed
+            failedTypes.add(entry.getKey());
+            prerequisiteCheck.getFailedOn().add(entry.getKey() + "/" + configEntry.getKey());
+          }
+        }
+      }
+    }
+
+    if (prerequisiteCheck.getFailedOn().size() > 0) {
+      prerequisiteCheck.setStatus(PrereqCheckStatus.WARNING);
+      String failReason = getFailReason(prerequisiteCheck, request);
+
+      prerequisiteCheck.setFailReason(String.format(failReason, StringUtils.join(
+          failedTypes, ", ")));
+
+    } else {
+      prerequisiteCheck.setStatus(PrereqCheckStatus.PASS);
+    }
+  }
+
+  /**
+   * Finds the repository version, making sure that if there is more than one
+   * match (unlikely), the entity with the latest version is chosen.
+   * @param version the version to find
+   * @return the matching repository version entity, or null if none is found
+   */
+  private RepositoryVersionEntity findByVersion(String version) {
+    List<RepositoryVersionEntity> list = repositoryVersionDaoProvider.get().findByVersion(version);
+    if (null == list || 0 == list.size()) {
+      return null;
+    } else if (1 == list.size()) {
+      return list.get(0);
+    } else {
+      Collections.sort(list, new Comparator<RepositoryVersionEntity>() {
+        @Override
+        public int compare(RepositoryVersionEntity o1, RepositoryVersionEntity o2) {
+          return VersionUtils.compareVersions(o1.getVersion(), o2.getVersion());
+        }
+      });
+
+      Collections.reverse(list);
+
+      return list.get(0);
+    }
+
+  }
+
+}
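
To summarize the rule documented on perform() above: a property is flagged either when it did not exist in the source stack but shows up in the conflict map, or when it was customized and the target stack drops it or ships a different default. The sketch below restates that rule in isolation; the MergeWarningRuleSketch class and its local ThreeWayValue holder are simplified stand-ins for illustration only and are not part of this commit.

// Standalone sketch of the warning rule in ConfigurationMergeCheck.perform().
// The nested ThreeWayValue is a local stand-in for
// org.apache.ambari.server.state.ConfigMergeHelper.ThreeWayValue.
public class MergeWarningRuleSketch {

  static class ThreeWayValue {
    String oldStackValue;  // default shipped by the current (old) stack
    String newStackValue;  // default shipped by the target (new) stack
    String savedValue;     // value currently saved in the cluster config
  }

  /** Returns true when the check would flag this property as a merge conflict. */
  static boolean isConflict(ThreeWayValue twv) {
    if (twv.oldStackValue == null) {
      // Not defined by the old stack, yet present in the conflict map:
      // the merged value will be overwritten.
      return true;
    }
    if (!twv.oldStackValue.equals(twv.savedValue)) {
      // The value was customized; warn if the new stack drops the property
      // or changes its default.
      return twv.newStackValue == null
          || !twv.oldStackValue.equals(twv.newStackValue);
    }
    return false;
  }

  public static void main(String[] args) {
    ThreeWayValue customized = new ThreeWayValue();
    customized.oldStackValue = "1024m";
    customized.newStackValue = "2048m";
    customized.savedValue = "4096m";
    System.out.println(isConflict(customized)); // true -> check reports WARNING
  }
}

When any property is flagged, the check sets the new WARNING status rather than FAIL, so the upgrade can still proceed after the operator reviews the listed config types.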

http://git-wip-us.apache.org/repos/asf/ambari/blob/3d2684ad/ambari-server/src/main/java/org/apache/ambari/server/checks/HostsRepositoryVersionCheck.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/checks/HostsRepositoryVersionCheck.java b/ambari-server/src/main/java/org/apache/ambari/server/checks/HostsRepositoryVersionCheck.java
index 0db7e2e..c13b2ff 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/checks/HostsRepositoryVersionCheck.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/checks/HostsRepositoryVersionCheck.java
@@ -57,32 +57,51 @@ public class HostsRepositoryVersionCheck extends AbstractCheckDescriptor {
     final Cluster cluster = clustersProvider.get().getCluster(clusterName);
     final Map<String, Host> clusterHosts = clustersProvider.get().getHostsForCluster(clusterName);
     final StackId stackId = cluster.getDesiredStackVersion();
-    for (Map.Entry<String, Host> hostEntry : clusterHosts.entrySet()) {
-      final Host host = hostEntry.getValue();
+
+    for (Host host : clusterHosts.values()) {
       if (host.getMaintenanceState(cluster.getClusterId()) == MaintenanceState.OFF) {
-        final RepositoryVersionEntity repositoryVersion = repositoryVersionDaoProvider.get().findByStackAndVersion(
-            stackId, request.getRepositoryVersion());
-        if (repositoryVersion == null) {
-          prerequisiteCheck.setStatus(PrereqCheckStatus.FAIL);
-          prerequisiteCheck.setFailReason(getFailReason(KEY_NO_REPO_VERSION, prerequisiteCheck, request));
-          prerequisiteCheck.getFailedOn().addAll(clusterHosts.keySet());
-          return;
-        }
 
-        StackEntity repositoryStackEntity = repositoryVersion.getStack();
-        StackId repositoryStackId = new StackId(
-            repositoryStackEntity.getStackName(),
-            repositoryStackEntity.getStackVersion());
+        if (null != request.getRepositoryVersion()) {
+          boolean found = false;
+          for (HostVersionEntity hve : hostVersionDaoProvider.get().findByHost(host.getHostName())) {
+
+            if (hve.getRepositoryVersion().getVersion().equals(request.getRepositoryVersion()) &&
+                hve.getState() == RepositoryVersionState.INSTALLED) {
+                found = true;
+                break;
+            }
+          }
+
+          if (!found) {
+            prerequisiteCheck.getFailedOn().add(host.getHostName());
+          }
+        } else {
+          final RepositoryVersionEntity repositoryVersion = repositoryVersionDaoProvider.get().findByStackAndVersion(
+              stackId, request.getRepositoryVersion());
+          if (repositoryVersion == null) {
+            prerequisiteCheck.setStatus(PrereqCheckStatus.FAIL);
+            prerequisiteCheck.setFailReason(getFailReason(KEY_NO_REPO_VERSION, prerequisiteCheck, request));
+            prerequisiteCheck.getFailedOn().addAll(clusterHosts.keySet());
+            return;
+          }
 
-        final HostVersionEntity hostVersion = hostVersionDaoProvider.get().findByClusterStackVersionAndHost(
-            clusterName, repositoryStackId, repositoryVersion.getVersion(),
-            host.getHostName());
+          StackEntity repositoryStackEntity = repositoryVersion.getStack();
+          StackId repositoryStackId = new StackId(
+              repositoryStackEntity.getStackName(),
+              repositoryStackEntity.getStackVersion());
+
+          final HostVersionEntity hostVersion = hostVersionDaoProvider.get().findByClusterStackVersionAndHost(
+              clusterName, repositoryStackId, repositoryVersion.getVersion(),
+              host.getHostName());
+
+          if (hostVersion == null || hostVersion.getState() != RepositoryVersionState.INSTALLED) {
+            prerequisiteCheck.getFailedOn().add(host.getHostName());
+          }
 
-        if (hostVersion == null || hostVersion.getState() != RepositoryVersionState.INSTALLED) {
-          prerequisiteCheck.getFailedOn().add(host.getHostName());
         }
       }
     }
+
     if (!prerequisiteCheck.getFailedOn().isEmpty()) {
       prerequisiteCheck.setStatus(PrereqCheckStatus.FAIL);
       prerequisiteCheck.setFailReason(getFailReason(prerequisiteCheck, request));

http://git-wip-us.apache.org/repos/asf/ambari/blob/3d2684ad/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/PreUpgradeCheckResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/PreUpgradeCheckResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/PreUpgradeCheckResourceProvider.java
index ee8cefce..a6fd61f 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/PreUpgradeCheckResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/PreUpgradeCheckResourceProvider.java
@@ -17,7 +17,8 @@
  */
 package org.apache.ambari.server.controller.internal;
 
-import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
@@ -26,6 +27,7 @@ import java.util.Set;
 
 import org.apache.ambari.server.StaticallyInject;
 import org.apache.ambari.server.checks.AbstractCheckDescriptor;
+import org.apache.ambari.server.checks.ConfigurationMergeCheck;
 import org.apache.ambari.server.checks.HostsHeartbeatCheck;
 import org.apache.ambari.server.checks.HostsMasterMaintenanceCheck;
 import org.apache.ambari.server.checks.HostsRepositoryVersionCheck;
@@ -51,6 +53,7 @@ import org.apache.ambari.server.controller.utilities.PropertyHelper;
 import org.apache.ambari.server.state.CheckHelper;
 import org.apache.ambari.server.state.stack.PrerequisiteCheck;
 
+import com.google.common.collect.Sets;
 import com.google.inject.Inject;
 
 /**
@@ -92,49 +95,40 @@ public class PreUpgradeCheckResourceProvider extends ReadOnlyResourceProvider {
   private static ServicesUpCheck servicesUpCheck;
   @Inject
   private static ServicesTezDistributedCacheCheck servicesTezDistributedCacheCheck;
+  @Inject
+  private static ConfigurationMergeCheck configMergeCheck;
 
   /**
   * List of the registered upgrade checks. Make sure that a check which
   * depends on the result of another check comes later in the list than its dependency.
    * For example, MR2 and Tez distributed cache checks rely on NN-HA check passing.
    */
-  @SuppressWarnings("serial")
-  private final List<AbstractCheckDescriptor> updateChecksRegistry = new ArrayList<AbstractCheckDescriptor>() {
-    {
-      add(hostsMasterMaintenanceCheck);
-      add(hostsRepositoryVersionCheck);
-      add(servicesMaintenanceModeCheck);
-      add(servicesNamenodeHighAvailabilityCheck);
-      add(secondaryNamenodeDeletedCheck);
-      add(servicesYarnWorkPreservingCheck);
-      add(servicesDecommissionCheck);
-      add(servicesJobsDistributedCacheCheck);
-      add(heartbeatCheck);
-      add(servicesUpCheck);
-      add(servicesTezDistributedCacheCheck);
-    }
-  };
-
-  @SuppressWarnings("serial")
-  private static Set<String> pkPropertyIds = new HashSet<String>() {
-    {
-      add(UPGRADE_CHECK_ID_PROPERTY_ID);
-    }
-  };
+  private List<AbstractCheckDescriptor> updateChecksRegistry = Arrays.asList(
+      hostsMasterMaintenanceCheck,
+      hostsRepositoryVersionCheck,
+      servicesMaintenanceModeCheck,
+      servicesNamenodeHighAvailabilityCheck,
+      secondaryNamenodeDeletedCheck,
+      servicesYarnWorkPreservingCheck,
+      servicesDecommissionCheck,
+      servicesJobsDistributedCacheCheck,
+      heartbeatCheck,
+      servicesUpCheck,
+      servicesTezDistributedCacheCheck,
+      configMergeCheck);
+
+  private static Set<String> pkPropertyIds = Collections.singleton(UPGRADE_CHECK_ID_PROPERTY_ID);
+
+  public static Set<String> propertyIds = Sets.newHashSet(
+      UPGRADE_CHECK_ID_PROPERTY_ID,
+      UPGRADE_CHECK_CHECK_PROPERTY_ID,
+      UPGRADE_CHECK_STATUS_PROPERTY_ID,
+      UPGRADE_CHECK_REASON_PROPERTY_ID,
+      UPGRADE_CHECK_FAILED_ON_PROPERTY_ID,
+      UPGRADE_CHECK_CHECK_TYPE_PROPERTY_ID,
+      UPGRADE_CHECK_CLUSTER_NAME_PROPERTY_ID,
+      UPGRADE_CHECK_REPOSITORY_VERSION_PROPERTY_ID);
 
-  @SuppressWarnings("serial")
-  public static Set<String> propertyIds = new HashSet<String>() {
-    {
-      add(UPGRADE_CHECK_ID_PROPERTY_ID);
-      add(UPGRADE_CHECK_CHECK_PROPERTY_ID);
-      add(UPGRADE_CHECK_STATUS_PROPERTY_ID);
-      add(UPGRADE_CHECK_REASON_PROPERTY_ID);
-      add(UPGRADE_CHECK_FAILED_ON_PROPERTY_ID);
-      add(UPGRADE_CHECK_CHECK_TYPE_PROPERTY_ID);
-      add(UPGRADE_CHECK_CLUSTER_NAME_PROPERTY_ID);
-      add(UPGRADE_CHECK_REPOSITORY_VERSION_PROPERTY_ID);
-    }
-  };
 
   @SuppressWarnings("serial")
   public static Map<Type, String> keyPropertyIds = new HashMap<Type, String>() {

http://git-wip-us.apache.org/repos/asf/ambari/blob/3d2684ad/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/RepositoryVersionDAO.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/RepositoryVersionDAO.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/RepositoryVersionDAO.java
index db5e956..2ed9846 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/RepositoryVersionDAO.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/RepositoryVersionDAO.java
@@ -108,6 +108,19 @@ public class RepositoryVersionDAO extends CrudDAO<RepositoryVersionEntity, Long>
   /**
  * Retrieves repository versions that match the given version.
    *
+   * @param version the version to look up
+   * @return the repository versions matching the given version, if any
+   */
+  @RequiresSession
+  public List<RepositoryVersionEntity> findByVersion(String version) {
+    final TypedQuery<RepositoryVersionEntity> query = entityManagerProvider.get().createNamedQuery("repositoryVersionByVersion", RepositoryVersionEntity.class);
+    query.setParameter("version", version);
+    return daoUtils.selectList(query);
+  }
+
+  /**
+   * Retrieves repository version by stack.
+   *
    * @param stack
    *          stack with major version (like HDP-2.2)
    * @return null if there is no suitable repository version

http://git-wip-us.apache.org/repos/asf/ambari/blob/3d2684ad/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/RepositoryVersionEntity.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/RepositoryVersionEntity.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/RepositoryVersionEntity.java
index dd5ac0a..bcd7071 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/RepositoryVersionEntity.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/RepositoryVersionEntity.java
@@ -63,7 +63,8 @@ import com.google.inject.Provider;
 @NamedQueries({
     @NamedQuery(name = "repositoryVersionByDisplayName", query = "SELECT repoversion FROM RepositoryVersionEntity repoversion WHERE repoversion.displayName=:displayname"),
     @NamedQuery(name = "repositoryVersionByStackVersion", query = "SELECT repoversion FROM RepositoryVersionEntity repoversion WHERE repoversion.stack.stackName=:stackName AND repoversion.stack.stackVersion=:stackVersion AND repoversion.version=:version"),
-    @NamedQuery(name = "repositoryVersionByStack", query = "SELECT repoversion FROM RepositoryVersionEntity repoversion WHERE repoversion.stack.stackName=:stackName AND repoversion.stack.stackVersion=:stackVersion")
+    @NamedQuery(name = "repositoryVersionByStack", query = "SELECT repoversion FROM RepositoryVersionEntity repoversion WHERE repoversion.stack.stackName=:stackName AND repoversion.stack.stackVersion=:stackVersion"),
+    @NamedQuery(name = "repositoryVersionByVersion", query = "SELECT repoversion FROM RepositoryVersionEntity repoversion WHERE repoversion.version=:version")
 })
 @StaticallyInject
 public class RepositoryVersionEntity {

http://git-wip-us.apache.org/repos/asf/ambari/blob/3d2684ad/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigMergeHelper.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigMergeHelper.java b/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigMergeHelper.java
new file mode 100644
index 0000000..706e493
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigMergeHelper.java
@@ -0,0 +1,192 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.server.state;
+
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Set;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import org.apache.ambari.server.AmbariException;
+import org.apache.ambari.server.api.services.AmbariMetaInfo;
+import org.apache.commons.collections.CollectionUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.inject.Inject;
+import com.google.inject.Provider;
+import com.google.inject.Singleton;
+
+/**
+ * Class that assists with merging configuration values across stacks.
+ */
+@Singleton
+public class ConfigMergeHelper {
+
+  private static final Pattern HEAP_PATTERN = Pattern.compile("(\\d+)([mgMG])");
+  private static final Logger LOG = LoggerFactory.getLogger(ConfigMergeHelper.class);
+
+  @Inject
+  private Provider<Clusters> m_clusters;
+
+  @Inject
+  private Provider<AmbariMetaInfo> m_ambariMetaInfo;
+
+  @SuppressWarnings("unchecked")
+  public Map<String, Map<String, ThreeWayValue>> getConflicts(String clusterName, StackId targetStack) throws AmbariException {
+    Cluster cluster = m_clusters.get().getCluster(clusterName);
+    StackId oldStack = cluster.getCurrentStackVersion();
+
+    Map<String, Map<String, String>> oldMap = new HashMap<String, Map<String, String>>();
+    Map<String, Map<String, String>> newMap = new HashMap<String, Map<String, String>>();
+
+    for (String serviceName : cluster.getServices().keySet()) {
+      Set<PropertyInfo> oldStackProperties = m_ambariMetaInfo.get().getServiceProperties(
+          oldStack.getStackName(), oldStack.getStackVersion(), serviceName);
+      addToMap(oldMap, oldStackProperties);
+
+      Set<PropertyInfo> newStackProperties = m_ambariMetaInfo.get().getServiceProperties(
+          targetStack.getStackName(), targetStack.getStackVersion(), serviceName);
+      addToMap(newMap, newStackProperties);
+    }
+
+    Set<PropertyInfo> set = m_ambariMetaInfo.get().getStackProperties(
+        oldStack.getStackName(), oldStack.getStackVersion());
+    addToMap(oldMap, set);
+
+    set = m_ambariMetaInfo.get().getStackProperties(
+        targetStack.getStackName(), targetStack.getStackVersion());
+    addToMap(newMap, set);
+
+    Map<String, Map<String, ThreeWayValue>> result =
+        new HashMap<String, Map<String, ThreeWayValue>>();
+
+    for (Entry<String, Map<String, String>> entry : oldMap.entrySet()) {
+      if (!newMap.containsKey(entry.getKey())) {
+        LOG.info("Stack {} does not have an equivalent config type {} in {}",
+            oldStack.getStackId(), entry.getKey(), targetStack.getStackId());
+        continue;
+      }
+
+      Map<String, String> oldPairs = entry.getValue();
+      Map<String, String> newPairs = newMap.get(entry.getKey());
+      Collection<String> customValueKeys = null;
+
+      Config config = cluster.getDesiredConfigByType(entry.getKey());
+      if (null != config) {
+        Set<String> valueKeys = config.getProperties().keySet();
+
+        customValueKeys = (Collection<String>) CollectionUtils.subtract(valueKeys, oldPairs.keySet());
+      }
+
+      if (null != customValueKeys) {
+        for (String prop : customValueKeys) {
+          String newVal = newPairs.get(prop);
+          String savedVal = config.getProperties().get(prop);
+          if (null != newVal && null != savedVal && !newVal.equals(savedVal)) {
+            ThreeWayValue twv = new ThreeWayValue();
+            twv.oldStackValue = null;
+            twv.newStackValue = normalizeValue(savedVal, newVal.trim());
+            twv.savedValue = savedVal.trim();
+
+            if (!result.containsKey(entry.getKey())) {
+              result.put(entry.getKey(), new HashMap<String, ThreeWayValue>());
+            }
+
+            result.get(entry.getKey()).put(prop, twv);
+          }
+        }
+      }
+
+
+      Collection<String> common = CollectionUtils.intersection(newPairs.keySet(),
+          oldPairs.keySet());
+
+      for (String prop : common) {
+        String oldVal = oldPairs.get(prop);
+        String newVal = newPairs.get(prop);
+        String savedVal = "";
+        if (null != config) {
+          savedVal = config.getProperties().get(prop);
+        }
+
+        if (!oldVal.equals(newVal) || !oldVal.equals(savedVal)) {
+          ThreeWayValue twv = new ThreeWayValue();
+          twv.oldStackValue = normalizeValue(savedVal, oldVal.trim());
+          twv.newStackValue = normalizeValue(savedVal, newVal.trim());
+          twv.savedValue = (null == savedVal) ? null : savedVal.trim();
+
+          if (!result.containsKey(entry.getKey())) {
+            result.put(entry.getKey(), new HashMap<String, ThreeWayValue>());
+          }
+
+          result.get(entry.getKey()).put(prop, twv);
+        }
+      }
+    }
+
+    return result;
+  }
+
+  private void addToMap(Map<String, Map<String, String>> map, Set<PropertyInfo> stackProperties) {
+    for (PropertyInfo pi : stackProperties) {
+      String type = ConfigHelper.fileNameToConfigType(pi.getFilename());
+
+      if (!map.containsKey(type)) {
+        map.put(type, new HashMap<String, String>());
+      }
+      map.get(type).put(pi.getName(), pi.getValue());
+    }
+
+  }
+  /**
+   * Represents the three different config values for merging.
+   */
+  public static class ThreeWayValue {
+    /**
+     * The previous stack-defined value.
+     */
+    public String oldStackValue;
+    /**
+     * The new stack-defined value.
+     */
+    public String newStackValue;
+    /**
+     * The saved stack value, possibly changed from previous stack-defined value.
+     */
+    public String savedValue;
+  }
+
+  private static String normalizeValue(String templateValue, String newRawValue) {
+    if (null == templateValue) {
+      return newRawValue;
+    }
+
+    Matcher m = HEAP_PATTERN.matcher(templateValue);
+
+    if (m.matches()) {
+      return newRawValue + m.group(2);
+    }
+
+    return newRawValue;
+  }
+
+}
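
As a rough illustration of the heap-size normalization used by getConflicts() above, the sketch below restates the private normalizeValue() logic in isolation; the NormalizeValueSketch class name and the example values are illustrative only and are not part of this commit.

// Standalone sketch of ConfigMergeHelper.normalizeValue(): when the saved
// (template) value looks like a heap size such as "1024m" or "2G", its unit
// suffix is appended to the raw stack value before the values are compared.
import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class NormalizeValueSketch {

  private static final Pattern HEAP_PATTERN = Pattern.compile("(\\d+)([mgMG])");

  static String normalize(String templateValue, String newRawValue) {
    if (templateValue == null) {
      return newRawValue;
    }
    Matcher m = HEAP_PATTERN.matcher(templateValue);
    if (m.matches()) {
      return newRawValue + m.group(2); // borrow the unit suffix, e.g. "m"
    }
    return newRawValue;
  }

  public static void main(String[] args) {
    // Saved value "1024m" vs. stack default "1024": after normalization the two
    // compare equal, so ConfigurationMergeCheck does not flag the property.
    System.out.println(normalize("1024m", "1024")); // prints 1024m
    // Non-heap values pass through unchanged.
    System.out.println(normalize("true", "false")); // prints false
  }
}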

http://git-wip-us.apache.org/repos/asf/ambari/blob/3d2684ad/ambari-server/src/main/java/org/apache/ambari/server/state/stack/PrereqCheckStatus.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/stack/PrereqCheckStatus.java b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/PrereqCheckStatus.java
index 3384b85..f8009e3 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/stack/PrereqCheckStatus.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/PrereqCheckStatus.java
@@ -22,5 +22,6 @@ package org.apache.ambari.server.state.stack;
  */
 public enum PrereqCheckStatus {
   PASS,
+  WARNING,
   FAIL
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/3d2684ad/ambari-server/src/test/java/org/apache/ambari/server/checks/ConfigurationMergeCheckTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/checks/ConfigurationMergeCheckTest.java b/ambari-server/src/test/java/org/apache/ambari/server/checks/ConfigurationMergeCheckTest.java
new file mode 100644
index 0000000..64eaa10
--- /dev/null
+++ b/ambari-server/src/test/java/org/apache/ambari/server/checks/ConfigurationMergeCheckTest.java
@@ -0,0 +1,237 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.server.checks;
+
+import static org.easymock.EasyMock.anyObject;
+import static org.easymock.EasyMock.expect;
+import static org.easymock.EasyMock.replay;
+
+import java.lang.reflect.Field;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.ambari.server.api.services.AmbariMetaInfo;
+import org.apache.ambari.server.controller.PrereqCheckRequest;
+import org.apache.ambari.server.orm.dao.RepositoryVersionDAO;
+import org.apache.ambari.server.orm.entities.RepositoryVersionEntity;
+import org.apache.ambari.server.orm.entities.StackEntity;
+import org.apache.ambari.server.state.Cluster;
+import org.apache.ambari.server.state.Clusters;
+import org.apache.ambari.server.state.Config;
+import org.apache.ambari.server.state.ConfigMergeHelper;
+import org.apache.ambari.server.state.PropertyInfo;
+import org.apache.ambari.server.state.Service;
+import org.apache.ambari.server.state.StackId;
+import org.apache.ambari.server.state.stack.PrerequisiteCheck;
+import org.easymock.EasyMock;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+
+import com.google.inject.Provider;
+//
+/**
+ * Unit tests for ConfigurationMergeCheck.
+ */
+public class ConfigurationMergeCheckTest {
+
+  private static final String CONFIG_FILE = "hdfs-site.xml";
+  private static final String CONFIG_TYPE = "hdfs-site";
+  private static final String CONFIG_PROPERTY = "hdfs.property";
+
+  private Clusters clusters = EasyMock.createMock(Clusters.class);
+  private Map<String, String> m_configMap = new HashMap<String, String>();
+
+  @Before
+  public void before() throws Exception {
+    Cluster cluster = EasyMock.createMock(Cluster.class);
+    StackId stackId = new StackId("HDP-1.0");
+
+    expect(cluster.getCurrentStackVersion()).andReturn(stackId).anyTimes();
+    expect(clusters.getCluster((String) anyObject())).andReturn(cluster).anyTimes();
+
+    expect(cluster.getServices()).andReturn(new HashMap<String, Service>() {{
+      put("HDFS", EasyMock.createMock(Service.class));
+    }}).anyTimes();
+
+
+    m_configMap.put(CONFIG_PROPERTY, "1024m");
+    Config config = EasyMock.createMock(Config.class);
+    expect(config.getProperties()).andReturn(m_configMap).anyTimes();
+
+    expect(cluster.getDesiredConfigByType(CONFIG_TYPE)).andReturn(config).anyTimes();
+
+    replay(clusters, cluster, config);
+  }
+
+  @Test
+  public void testApplicable() throws Exception {
+
+    PrereqCheckRequest request = new PrereqCheckRequest("cluster");
+
+    ConfigurationMergeCheck cmc = new ConfigurationMergeCheck();
+    Assert.assertFalse(cmc.isApplicable(request));
+
+    final RepositoryVersionDAO repositoryVersionDAO = EasyMock.createMock(RepositoryVersionDAO.class);
+    expect(repositoryVersionDAO.findByVersion("1.0")).andReturn(
+        Collections.singletonList(createFor("1.0"))).anyTimes();
+    expect(repositoryVersionDAO.findByVersion("1.1")).andReturn(
+        Collections.singletonList(createFor("1.1"))).anyTimes();
+    expect(repositoryVersionDAO.findByVersion("1.2")).andReturn(
+        Collections.<RepositoryVersionEntity>emptyList()).anyTimes();
+
+
+    replay(repositoryVersionDAO);
+
+    cmc.repositoryVersionDaoProvider = new Provider<RepositoryVersionDAO>() {
+      @Override
+      public RepositoryVersionDAO get() {
+        return repositoryVersionDAO;
+      }
+    };
+
+    cmc.clustersProvider = new Provider<Clusters>() {
+      @Override
+      public Clusters get() {
+        return clusters;
+      }
+    };
+
+    request.setRepositoryVersion("1.0");
+    Assert.assertFalse(cmc.isApplicable(request));
+
+    request.setRepositoryVersion("1.1");
+    Assert.assertTrue(cmc.isApplicable(request));
+
+    request.setRepositoryVersion("1.2");
+    Assert.assertFalse(cmc.isApplicable(request));
+
+  }
+
+  @Test
+  public void testPerform() throws Exception {
+    ConfigurationMergeCheck cmc = new ConfigurationMergeCheck();
+
+    final RepositoryVersionDAO repositoryVersionDAO = EasyMock.createMock(RepositoryVersionDAO.class);
+    expect(repositoryVersionDAO.findByVersion("1.0")).andReturn(
+        Collections.singletonList(createFor("1.0"))).anyTimes();
+    expect(repositoryVersionDAO.findByVersion("1.1")).andReturn(
+        Collections.singletonList(createFor("1.1"))).anyTimes();
+
+    replay(repositoryVersionDAO);
+
+    cmc.repositoryVersionDaoProvider = new Provider<RepositoryVersionDAO>() {
+      @Override
+      public RepositoryVersionDAO get() {
+        return repositoryVersionDAO;
+      }
+    };
+
+    cmc.clustersProvider = new Provider<Clusters>() {
+      @Override
+      public Clusters get() {
+        return clusters;
+      }
+    };
+
+    cmc.m_mergeHelper = new ConfigMergeHelper();
+    Field field = ConfigMergeHelper.class.getDeclaredField("m_clusters");
+    field.setAccessible(true);
+    field.set(cmc.m_mergeHelper, cmc.clustersProvider);
+
+    final AmbariMetaInfo ami = EasyMock.createMock(AmbariMetaInfo.class);
+
+
+    field = ConfigMergeHelper.class.getDeclaredField("m_ambariMetaInfo");
+    field.setAccessible(true);
+    field.set(cmc.m_mergeHelper, new Provider<AmbariMetaInfo>() {
+      @Override
+      public AmbariMetaInfo get() {
+        return ami;
+      }
+    });
+
+    PropertyInfo pi10 = new PropertyInfo();
+    pi10.setFilename(CONFIG_FILE);
+    pi10.setName(CONFIG_PROPERTY);
+    pi10.setValue("1024m");
+
+    PropertyInfo pi11 = new PropertyInfo();
+    pi11.setFilename(CONFIG_FILE);
+    pi11.setName(CONFIG_PROPERTY);
+    pi11.setValue("1024m");
+
+    expect(ami.getServiceProperties("HDP", "1.0", "HDFS")).andReturn(
+        Collections.singleton(pi10)).anyTimes();
+
+    expect(ami.getServiceProperties("HDP", "1.1", "HDFS")).andReturn(
+        Collections.singleton(pi11)).anyTimes();
+
+    expect(ami.getStackProperties(anyObject(String.class), anyObject(String.class))).andReturn(
+        Collections.<PropertyInfo>emptySet()).anyTimes();
+
+    replay(ami);
+
+    PrereqCheckRequest request = new PrereqCheckRequest("cluster");
+    request.setRepositoryVersion("1.1");
+
+    PrerequisiteCheck check = new PrerequisiteCheck(null, "cluster");
+    cmc.perform(check, request);
+    Assert.assertEquals("Expect no warnings", 0, check.getFailedOn().size());
+
+    check = new PrerequisiteCheck(null, "cluster");
+    pi11.setValue("1026m");
+    cmc.perform(check, request);
+    Assert.assertEquals("Expect warning when user-set has changed from new default",
+        1, check.getFailedOn().size());
+
+    check = new PrerequisiteCheck(null, "cluster");
+    pi11.setName(CONFIG_PROPERTY + ".foo");
+    cmc.perform(check, request);
+    Assert.assertEquals("Expect no warning when user new stack is empty",
+        0, check.getFailedOn().size());
+
+
+    check = new PrerequisiteCheck(null, "cluster");
+    pi11.setName(CONFIG_PROPERTY);
+    pi10.setName(CONFIG_PROPERTY + ".foo");
+    cmc.perform(check, request);
+    Assert.assertEquals("Expect warning when user old stack is empty, and value changed",
+        1, check.getFailedOn().size());
+
+  }
+
+  private RepositoryVersionEntity createFor(final String stackVersion) {
+    RepositoryVersionEntity entity = new RepositoryVersionEntity();
+
+    entity.setStack(new StackEntity() {
+      @Override
+      public String getStackVersion() {
+        return stackVersion;
+      }
+
+      @Override
+      public String getStackName() {
+        return "HDP";
+      }
+    });
+
+    return entity;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/3d2684ad/ambari-server/src/test/java/org/apache/ambari/server/checks/HostsRepositoryVersionCheckTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/checks/HostsRepositoryVersionCheckTest.java b/ambari-server/src/test/java/org/apache/ambari/server/checks/HostsRepositoryVersionCheckTest.java
index 96151af..c6c7e54 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/checks/HostsRepositoryVersionCheckTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/checks/HostsRepositoryVersionCheckTest.java
@@ -17,6 +17,7 @@
  */
 package org.apache.ambari.server.checks;
 
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.Map;
 
@@ -138,4 +139,62 @@ public class HostsRepositoryVersionCheckTest {
     hostsRepositoryVersionCheck.perform(check, new PrereqCheckRequest("cluster"));
     Assert.assertEquals(PrereqCheckStatus.PASS, check.getStatus());
   }
+
+  @Test
+  public void testPerformWithVersion() throws Exception {
+    final HostsRepositoryVersionCheck hostsRepositoryVersionCheck = new HostsRepositoryVersionCheck();
+    hostsRepositoryVersionCheck.clustersProvider = new Provider<Clusters>() {
+
+      @Override
+      public Clusters get() {
+        return clusters;
+      }
+    };
+    hostsRepositoryVersionCheck.repositoryVersionDaoProvider = new Provider<RepositoryVersionDAO>() {
+      @Override
+      public RepositoryVersionDAO get() {
+        return repositoryVersionDAO;
+      }
+    };
+    hostsRepositoryVersionCheck.hostVersionDaoProvider = new Provider<HostVersionDAO>() {
+      @Override
+      public HostVersionDAO get() {
+        return hostVersionDAO;
+      }
+    };
+
+    final Cluster cluster = Mockito.mock(Cluster.class);
+    Mockito.when(cluster.getClusterId()).thenReturn(1L);
+    Mockito.when(cluster.getDesiredStackVersion()).thenReturn(new StackId());
+    Mockito.when(clusters.getCluster("cluster")).thenReturn(cluster);
+    final Map<String, Host> hosts = new HashMap<String, Host>();
+    final Host host1 = Mockito.mock(Host.class);
+    final Host host2 = Mockito.mock(Host.class);
+    final Host host3 = Mockito.mock(Host.class);
+    Mockito.when(host1.getMaintenanceState(1L)).thenReturn(MaintenanceState.OFF);
+    Mockito.when(host2.getMaintenanceState(1L)).thenReturn(MaintenanceState.OFF);
+    Mockito.when(host3.getMaintenanceState(1L)).thenReturn(MaintenanceState.OFF);
+    hosts.put("host1", host1);
+    hosts.put("host2", host2);
+    hosts.put("host3", host3);
+    Mockito.when(clusters.getHostsForCluster("cluster")).thenReturn(hosts);
+
+    RepositoryVersionEntity rve = new RepositoryVersionEntity();
+    rve.setVersion("1.1.1");
+
+    HostVersionEntity hve = new HostVersionEntity();
+    hve.setRepositoryVersion(rve);
+    hve.setState(RepositoryVersionState.INSTALLED);
+
+    Mockito.when(
+        hostVersionDAO.findByHost(Mockito.anyString())).thenReturn(
+            Collections.singletonList(hve));
+
+    PrerequisiteCheck check = new PrerequisiteCheck(null, null);
+    PrereqCheckRequest request = new PrereqCheckRequest("cluster");
+    request.setRepositoryVersion("1.1.1");
+    hostsRepositoryVersionCheck.perform(check, request);
+    Assert.assertEquals(PrereqCheckStatus.PASS, check.getStatus());
+  }
+
 }