Posted to commits@ambari.apache.org by mp...@apache.org on 2017/10/11 08:38:39 UTC

[1/7] ambari git commit: AMBARI-22189. Ambari-server start failed after branch-3.0-perf branch merge. (mpapirkovskyy)

Repository: ambari
Updated Branches:
  refs/heads/branch-3.0-perf 857bef7cd -> 5ae98dbeb


AMBARI-22189. Ambari-server start failed after branch-3.0-perf branch merge. (mpapirkovskyy)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/3051fa9e
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/3051fa9e
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/3051fa9e

Branch: refs/heads/branch-3.0-perf
Commit: 3051fa9e7b1da1c3012af85729c9bde11c9013b3
Parents: 857bef7
Author: Myroslav Papirkovskyi <mp...@hortonworks.com>
Authored: Tue Oct 10 18:32:42 2017 +0300
Committer: Myroslav Papirkovskyi <mp...@hortonworks.com>
Committed: Tue Oct 10 18:32:59 2017 +0300

----------------------------------------------------------------------
 .../org/apache/ambari/server/agent/AgentReportsProcessor.java    | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/3051fa9e/ambari-server/src/main/java/org/apache/ambari/server/agent/AgentReportsProcessor.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/agent/AgentReportsProcessor.java b/ambari-server/src/main/java/org/apache/ambari/server/agent/AgentReportsProcessor.java
index 586a16e..88c2665 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/agent/AgentReportsProcessor.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/agent/AgentReportsProcessor.java
@@ -52,9 +52,7 @@ public class AgentReportsProcessor {
   private UnitOfWork unitOfWork;
 
   @Inject
-  private Configuration configuration;
-
-  public AgentReportsProcessor() {
+  public AgentReportsProcessor(Configuration configuration) {
 
     ThreadFactory threadFactory = new ThreadFactoryBuilder().setNameFormat("agent-report-processor-%d").build();
     int poolSize = configuration.getAgentsReportThreadPoolSize();
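
The root cause is Guice's injection order: fields annotated with @Inject are
populated only after the constructor has returned, so the previous no-arg
constructor read a still-null "configuration" and ambari-server failed on
start. Constructor injection guarantees the dependency is set before the
thread pool is sized. A minimal sketch of the pattern (Configuration and its
getter are the Ambari types from the diff above):

    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;
    import com.google.inject.Inject;

    public class AgentReportsProcessor {
      private final ExecutorService executor;

      @Inject
      public AgentReportsProcessor(Configuration configuration) {
        // unlike an @Inject field, a constructor parameter is never null here
        executor = Executors.newFixedThreadPool(
            configuration.getAgentsReportThreadPoolSize());
      }
    }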


[6/7] ambari git commit: AMBARI-22190. After merging trunk to branch-3.0-perf some parts of code are missing. (mpapirkovskyy)

Posted by mp...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ComponentResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ComponentResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ComponentResourceProvider.java
index 1d5ee2b..99ecd76 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ComponentResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ComponentResourceProvider.java
@@ -87,6 +87,8 @@ public class ComponentResourceProvider extends AbstractControllerResourceProvide
   protected static final String COMPONENT_TOTAL_COUNT_PROPERTY_ID     = "ServiceComponentInfo/total_count";
   protected static final String COMPONENT_STARTED_COUNT_PROPERTY_ID   = "ServiceComponentInfo/started_count";
   protected static final String COMPONENT_INSTALLED_COUNT_PROPERTY_ID = "ServiceComponentInfo/installed_count";
+  protected static final String COMPONENT_INSTALLED_AND_MAINTENANCE_OFF_COUNT_PROPERTY_ID
+                                                                      = "ServiceComponentInfo/installed_and_maintenance_off_count";
   protected static final String COMPONENT_INIT_COUNT_PROPERTY_ID      = "ServiceComponentInfo/init_count";
   protected static final String COMPONENT_UNKNOWN_COUNT_PROPERTY_ID   = "ServiceComponentInfo/unknown_count";
   protected static final String COMPONENT_INSTALL_FAILED_COUNT_PROPERTY_ID = "ServiceComponentInfo/install_failed_count";
@@ -126,6 +128,7 @@ public class ComponentResourceProvider extends AbstractControllerResourceProvide
     PROPERTY_IDS.add(COMPONENT_TOTAL_COUNT_PROPERTY_ID);
     PROPERTY_IDS.add(COMPONENT_STARTED_COUNT_PROPERTY_ID);
     PROPERTY_IDS.add(COMPONENT_INSTALLED_COUNT_PROPERTY_ID);
+    PROPERTY_IDS.add(COMPONENT_INSTALLED_AND_MAINTENANCE_OFF_COUNT_PROPERTY_ID);
 
     PROPERTY_IDS.add(COMPONENT_INIT_COUNT_PROPERTY_ID);
     PROPERTY_IDS.add(COMPONENT_UNKNOWN_COUNT_PROPERTY_ID);
@@ -225,6 +228,7 @@ public class ComponentResourceProvider extends AbstractControllerResourceProvide
       setResourceProperty(resource, COMPONENT_TOTAL_COUNT_PROPERTY_ID, response.getServiceComponentStateCount().get("totalCount"), requestedIds);
       setResourceProperty(resource, COMPONENT_STARTED_COUNT_PROPERTY_ID, response.getServiceComponentStateCount().get("startedCount"), requestedIds);
       setResourceProperty(resource, COMPONENT_INSTALLED_COUNT_PROPERTY_ID, response.getServiceComponentStateCount().get("installedCount"), requestedIds);
+      setResourceProperty(resource, COMPONENT_INSTALLED_AND_MAINTENANCE_OFF_COUNT_PROPERTY_ID, response.getServiceComponentStateCount().get("installedAndMaintenanceOffCount"), requestedIds);
       setResourceProperty(resource, COMPONENT_INSTALL_FAILED_COUNT_PROPERTY_ID, response.getServiceComponentStateCount().get("installFailedCount"), requestedIds);
       setResourceProperty(resource, COMPONENT_INIT_COUNT_PROPERTY_ID, response.getServiceComponentStateCount().get("initCount"), requestedIds);
       setResourceProperty(resource, COMPONENT_UNKNOWN_COUNT_PROPERTY_ID, response.getServiceComponentStateCount().get("unknownCount"), requestedIds);

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ConfigGroupResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ConfigGroupResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ConfigGroupResourceProvider.java
index 2a45f02..737bfa4 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ConfigGroupResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ConfigGroupResourceProvider.java
@@ -62,6 +62,7 @@ import org.apache.ambari.server.state.ConfigFactory;
 import org.apache.ambari.server.state.Host;
 import org.apache.ambari.server.state.configgroup.ConfigGroup;
 import org.apache.ambari.server.state.configgroup.ConfigGroupFactory;
+import org.apache.commons.collections.MapUtils;
 import org.apache.commons.lang.StringUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -84,6 +85,8 @@ public class ConfigGroupResourceProvider extends
     .getPropertyId("ConfigGroup", "group_name");
   protected static final String CONFIGGROUP_TAG_PROPERTY_ID = PropertyHelper
     .getPropertyId("ConfigGroup", "tag");
+  protected static final String CONFIGGROUP_SERVICENAME_PROPERTY_ID = PropertyHelper
+    .getPropertyId("ConfigGroup", "service_name");
   protected static final String CONFIGGROUP_DESC_PROPERTY_ID = PropertyHelper
     .getPropertyId("ConfigGroup", "description");
   protected static final String CONFIGGROUP_SCV_NOTE_ID = PropertyHelper
@@ -562,8 +565,8 @@ public class ConfigGroupResourceProvider extends
 
       verifyHostList(cluster, hosts, request);
 
-      String serviceName = null;
-      if (request.getConfigs() != null && !request.getConfigs().isEmpty()) {
+      String serviceName = request.getServiceName();
+      if (serviceName == null && !MapUtils.isEmpty(request.getConfigs())) {
         try {
           serviceName = cluster.getServiceForConfigTypes(request.getConfigs().keySet());
         } catch (IllegalArgumentException e) {
@@ -751,6 +754,7 @@ public class ConfigGroupResourceProvider extends
       (String) properties.get(CONFIGGROUP_CLUSTER_NAME_PROPERTY_ID),
       (String) properties.get(CONFIGGROUP_NAME_PROPERTY_ID),
       (String) properties.get(CONFIGGROUP_TAG_PROPERTY_ID),
+      (String) properties.get(CONFIGGROUP_SERVICENAME_PROPERTY_ID),
       (String) properties.get(CONFIGGROUP_DESC_PROPERTY_ID),
       null,
       null);
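
Two things change in this create path: an explicitly supplied
ConfigGroup/service_name now takes precedence, and the fallback that derives
the service from the group's config types becomes null-safe, because
commons-collections MapUtils.isEmpty returns true for a null map instead of
throwing. A self-contained sketch of that null-safety (the demo class is
hypothetical):

    import java.util.Map;
    import org.apache.commons.collections.MapUtils;

    public class MapUtilsDemo {
      public static void main(String[] args) {
        Map<String, String> configs = null;
        System.out.println(MapUtils.isEmpty(configs)); // prints "true", no NPE
      }
    }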

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/DefaultProviderModule.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/DefaultProviderModule.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/DefaultProviderModule.java
index 074f8e1..5e5bff5 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/DefaultProviderModule.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/DefaultProviderModule.java
@@ -106,8 +106,6 @@ public class DefaultProviderModule extends AbstractProviderModule {
         return new UpgradeItemResourceProvider(managementController);
       case UpgradeSummary:
         return new UpgradeSummaryResourceProvider(managementController);
-      case ClusterStackVersion:
-        return new ClusterStackVersionResourceProvider(managementController);
       case PreUpgradeCheck:
         return new PreUpgradeCheckResourceProvider(managementController);
       case HostStackVersion:

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/RepositoryResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/RepositoryResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/RepositoryResourceProvider.java
index 1d3e922..4814a33 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/RepositoryResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/RepositoryResourceProvider.java
@@ -52,6 +52,8 @@ public class RepositoryResourceProvider extends AbstractControllerResourceProvid
   public static final String REPOSITORY_CLUSTER_STACK_VERSION_PROPERTY_ID = PropertyHelper.getPropertyId("Repositories", "cluster_version_id");
   public static final String REPOSITORY_OS_TYPE_PROPERTY_ID               = PropertyHelper.getPropertyId("Repositories", "os_type");
   public static final String REPOSITORY_BASE_URL_PROPERTY_ID              = PropertyHelper.getPropertyId("Repositories", "base_url");
+  public static final String REPOSITORY_DISTRIBUTION_PROPERTY_ID          = PropertyHelper.getPropertyId("Repositories", "distribution");
+  public static final String REPOSITORY_COMPONENTS_PROPERTY_ID            = PropertyHelper.getPropertyId("Repositories", "components");
   public static final String REPOSITORY_REPO_ID_PROPERTY_ID               = PropertyHelper.getPropertyId("Repositories", "repo_id");
   public static final String REPOSITORY_MIRRORS_LIST_PROPERTY_ID          = PropertyHelper.getPropertyId("Repositories", "mirrors_list");
   public static final String REPOSITORY_DEFAULT_BASE_URL_PROPERTY_ID      = PropertyHelper.getPropertyId("Repositories", "default_base_url");
@@ -74,6 +76,8 @@ public class RepositoryResourceProvider extends AbstractControllerResourceProvid
   public static Set<String> propertyIds = new HashSet<String>() {
     {
       add(REPOSITORY_REPO_NAME_PROPERTY_ID);
+      add(REPOSITORY_DISTRIBUTION_PROPERTY_ID);
+      add(REPOSITORY_COMPONENTS_PROPERTY_ID);
       add(REPOSITORY_STACK_NAME_PROPERTY_ID);
       add(REPOSITORY_STACK_VERSION_PROPERTY_ID);
       add(REPOSITORY_OS_TYPE_PROPERTY_ID);
@@ -154,6 +158,8 @@ public class RepositoryResourceProvider extends AbstractControllerResourceProvid
         setResourceProperty(resource, REPOSITORY_STACK_NAME_PROPERTY_ID, response.getStackName(), requestedIds);
         setResourceProperty(resource, REPOSITORY_STACK_VERSION_PROPERTY_ID, response.getStackVersion(), requestedIds);
         setResourceProperty(resource, REPOSITORY_REPO_NAME_PROPERTY_ID, response.getRepoName(), requestedIds);
+        setResourceProperty(resource, REPOSITORY_DISTRIBUTION_PROPERTY_ID, response.getDistribution(), requestedIds);
+        setResourceProperty(resource, REPOSITORY_COMPONENTS_PROPERTY_ID, response.getComponents(), requestedIds);
         setResourceProperty(resource, REPOSITORY_BASE_URL_PROPERTY_ID, response.getBaseUrl(), requestedIds);
         setResourceProperty(resource, REPOSITORY_OS_TYPE_PROPERTY_ID, response.getOsType(), requestedIds);
         setResourceProperty(resource, REPOSITORY_REPO_ID_PROPERTY_ID, response.getRepoId(), requestedIds);

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/RepositoryVersionResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/RepositoryVersionResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/RepositoryVersionResourceProvider.java
index 44ef9f7..6455805 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/RepositoryVersionResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/RepositoryVersionResourceProvider.java
@@ -87,6 +87,7 @@ public class RepositoryVersionResourceProvider extends AbstractAuthorizedResourc
   public static final String REPOSITORY_VERSION_REPOSITORY_VERSION_PROPERTY_ID = PropertyHelper.getPropertyId("RepositoryVersions", "repository_version");
   public static final String REPOSITORY_VERSION_DISPLAY_NAME_PROPERTY_ID       = PropertyHelper.getPropertyId("RepositoryVersions", "display_name");
   public static final String REPOSITORY_VERSION_HIDDEN_PROPERTY_ID             = PropertyHelper.getPropertyId("RepositoryVersions", "hidden");
+  public static final String REPOSITORY_VERSION_RESOLVED_PROPERTY_ID           = PropertyHelper.getPropertyId("RepositoryVersions", "resolved");
   public static final String SUBRESOURCE_OPERATING_SYSTEMS_PROPERTY_ID         = new OperatingSystemResourceDefinition().getPluralName();
   public static final String SUBRESOURCE_REPOSITORIES_PROPERTY_ID              = new RepositoryResourceDefinition().getPluralName();
 
@@ -121,7 +122,8 @@ public class RepositoryVersionResourceProvider extends AbstractAuthorizedResourc
       REPOSITORY_VERSION_PARENT_ID,
       REPOSITORY_VERSION_HAS_CHILDREN,
       REPOSITORY_VERSION_AVAILABLE_SERVICES,
-      REPOSITORY_VERSION_STACK_SERVICES);
+      REPOSITORY_VERSION_STACK_SERVICES,
+      REPOSITORY_VERSION_RESOLVED_PROPERTY_ID);
 
   @SuppressWarnings("serial")
   public static Map<Type, String> keyPropertyIds = new ImmutableMap.Builder<Type, String>()
@@ -257,7 +259,7 @@ public class RepositoryVersionResourceProvider extends AbstractAuthorizedResourc
       setResourceProperty(resource, REPOSITORY_VERSION_HIDDEN_PROPERTY_ID, entity.isHidden(), requestedIds);
       setResourceProperty(resource, REPOSITORY_VERSION_REPOSITORY_VERSION_PROPERTY_ID, entity.getVersion(), requestedIds);
       setResourceProperty(resource, REPOSITORY_VERSION_TYPE_PROPERTY_ID, entity.getType(), requestedIds);
-
+      setResourceProperty(resource, REPOSITORY_VERSION_RESOLVED_PROPERTY_ID, entity.isResolved(), requestedIds);
       setResourceProperty(resource, REPOSITORY_VERSION_PARENT_ID, entity.getParentId(), requestedIds);
 
       List<RepositoryVersionEntity> children = entity.getChildren();

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ServiceResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ServiceResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ServiceResourceProvider.java
index 3f8b6e9..526f173 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ServiceResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ServiceResourceProvider.java
@@ -427,6 +427,8 @@ public class ServiceResourceProvider extends AbstractControllerResourceProvider
     // do all validation checks
     validateCreateRequests(requests, clusters);
 
+    Set<Cluster> clustersSetFromRequests = new HashSet<>();
+
     for (ServiceRequest request : requests) {
       Cluster cluster = clusters.getCluster(request.getClusterName());
 
@@ -482,6 +484,12 @@ public class ServiceResourceProvider extends AbstractControllerResourceProvider
 
       // Initialize service widgets
       getManagementController().initializeWidgetsAndLayouts(cluster, s);
+      clustersSetFromRequests.add(cluster);
+    }
+
+    // Create cluster widgets and layouts
+    for (Cluster cluster : clustersSetFromRequests) {
+      getManagementController().initializeWidgetsAndLayouts(cluster, null);
     }
   }
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UpgradeResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UpgradeResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UpgradeResourceProvider.java
index 0ff21a2..52f66bc 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UpgradeResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UpgradeResourceProvider.java
@@ -731,11 +731,12 @@ public class UpgradeResourceProvider extends AbstractControllerResourceProvider
               itemEntity.setText(wrapper.getText());
               itemEntity.setTasks(wrapper.getTasksJson());
               itemEntity.setHosts(wrapper.getHostsJson());
-              itemEntities.add(itemEntity);
 
               injectVariables(configHelper, cluster, itemEntity);
-              makeServerSideStage(group, upgradeContext, effectiveRepositoryVersion, req,
-                  itemEntity, (ServerSideActionTask) task, configUpgradePack);
+              if (makeServerSideStage(group, upgradeContext, effectiveRepositoryVersion, req,
+                  itemEntity, (ServerSideActionTask) task, configUpgradePack)) {
+                itemEntities.add(itemEntity);
+              }
             }
           }
         } else {
@@ -1184,7 +1185,7 @@ public class UpgradeResourceProvider extends AbstractControllerResourceProvider
    * upgrade
    * @throws AmbariException
    */
-  private void makeServerSideStage(UpgradeGroupHolder group, UpgradeContext context,
+  private boolean makeServerSideStage(UpgradeGroupHolder group, UpgradeContext context,
       RepositoryVersionEntity effectiveRepositoryVersion, RequestStageContainer request,
       UpgradeItemEntity entity, ServerSideActionTask task, ConfigUpgradePack configUpgradePack)
       throws AmbariException {
@@ -1201,6 +1202,8 @@ public class UpgradeResourceProvider extends AbstractControllerResourceProvider
     String itemDetail = entity.getText();
     String stageText = StringUtils.abbreviate(entity.getText(), 255);
 
+    boolean process = true;
+
     switch (task.getType()) {
       case SERVER_ACTION:
       case MANUAL: {
@@ -1236,6 +1239,13 @@ public class UpgradeResourceProvider extends AbstractControllerResourceProvider
       }
       case CONFIGURE: {
         ConfigureTask ct = (ConfigureTask) task;
+
+        // !!! would prefer to do this in the sequence generator, but there are too many
+        // places to miss
+        if (context.getOrchestrationType().isRevertable() && !ct.supportsPatch) {
+          process = false;
+        }
+
         Map<String, String> configurationChanges =
                 ct.getConfigurationChanges(cluster, configUpgradePack);
 
@@ -1266,6 +1276,10 @@ public class UpgradeResourceProvider extends AbstractControllerResourceProvider
         break;
     }
 
+    if (!process) {
+      return false;
+    }
+
     ActionExecutionContext actionContext = new ActionExecutionContext(cluster.getClusterName(),
         Role.AMBARI_SERVER_ACTION.toString(), Collections.emptyList(),
         commandParams);
@@ -1303,6 +1317,8 @@ public class UpgradeResourceProvider extends AbstractControllerResourceProvider
         context.isComponentFailureAutoSkipped());
 
     request.addStages(Collections.singletonList(stage));
+
+    return true;
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/VersionDefinitionResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/VersionDefinitionResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/VersionDefinitionResourceProvider.java
index 394a0bf..f94ac32 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/VersionDefinitionResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/VersionDefinitionResourceProvider.java
@@ -770,6 +770,10 @@ public class VersionDefinitionResourceProvider extends AbstractAuthorizedResourc
             repo.getRepositoryId());
         repoElement.put(PropertyHelper.getPropertyName(RepositoryResourceProvider.REPOSITORY_REPO_NAME_PROPERTY_ID),
             repo.getName());
+        repoElement.put(PropertyHelper.getPropertyName(RepositoryResourceProvider.REPOSITORY_DISTRIBUTION_PROPERTY_ID),
+            repo.getDistribution());
+        repoElement.put(PropertyHelper.getPropertyName(RepositoryResourceProvider.REPOSITORY_COMPONENTS_PROPERTY_ID),
+            repo.getComponents());
         repoElement.put(PropertyHelper.getPropertyName(RepositoryResourceProvider.REPOSITORY_STACK_NAME_PROPERTY_ID),
             entity.getStackName());
         repoElement.put(PropertyHelper.getPropertyName(RepositoryResourceProvider.REPOSITORY_STACK_VERSION_PROPERTY_ID),

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/main/java/org/apache/ambari/server/events/AlertReceivedEvent.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/events/AlertReceivedEvent.java b/ambari-server/src/main/java/org/apache/ambari/server/events/AlertReceivedEvent.java
index e0c8be9e..f09e3ba 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/events/AlertReceivedEvent.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/events/AlertReceivedEvent.java
@@ -27,6 +27,12 @@ import org.apache.ambari.server.state.Alert;
  */
 public final class AlertReceivedEvent extends AlertEvent {
 
+  /**
+   * Constructor.
+   *
+   * @param clusterId the ID of the cluster the alert is for
+   * @param alert the received alert
+   */
   public AlertReceivedEvent(long clusterId, Alert alert) {
     super(clusterId, alert);
   }
@@ -41,7 +47,7 @@ public final class AlertReceivedEvent extends AlertEvent {
   @Override
   public String toString() {
     StringBuilder buffer = new StringBuilder("AlertReceivedEvent{");
-    buffer.append("clusterId=").append(m_clusterId);
+    buffer.append("cluserId=").append(m_clusterId);
     buffer.append(", alerts=").append(getAlerts());
     buffer.append("}");
     return buffer.toString();

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/main/java/org/apache/ambari/server/events/ClusterConfigFinishedEvent.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/events/ClusterConfigFinishedEvent.java b/ambari-server/src/main/java/org/apache/ambari/server/events/ClusterConfigFinishedEvent.java
index cdb86ac..f0574d0 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/events/ClusterConfigFinishedEvent.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/events/ClusterConfigFinishedEvent.java
@@ -23,17 +23,27 @@ package org.apache.ambari.server.events;
  * cluster configuration is successfully updated.
  */
 public class ClusterConfigFinishedEvent extends AmbariEvent {
+
+  private final long clusterId;
   private final String clusterName;
 
 
-  public ClusterConfigFinishedEvent(String clusterName) {
+  public ClusterConfigFinishedEvent(long clusterId, String clusterName) {
     super(AmbariEventType.CLUSTER_CONFIG_FINISHED);
+    this.clusterId = clusterId;
     this.clusterName = clusterName;
   }
 
   /**
+   * Get the cluster id
+   * @return the cluster id
+   */
+  public long getClusterId() {
+    return clusterId;
+  }
+
+  /**
    * Get the cluster name
-   *
    * @return
    */
   public String getClusterName() {
@@ -46,6 +56,7 @@ public class ClusterConfigFinishedEvent extends AmbariEvent {
   @Override
   public String toString() {
     StringBuilder buffer = new StringBuilder("ClusterConfigChangedEvent{");
+    buffer.append("clusterId=").append(getClusterId());
     buffer.append("clusterName=").append(getClusterName());
     buffer.append("}");
     return buffer.toString();

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/main/java/org/apache/ambari/server/events/listeners/upgrade/DistributeRepositoriesActionListener.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/events/listeners/upgrade/DistributeRepositoriesActionListener.java b/ambari-server/src/main/java/org/apache/ambari/server/events/listeners/upgrade/DistributeRepositoriesActionListener.java
index aa29894..1944761 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/events/listeners/upgrade/DistributeRepositoriesActionListener.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/events/listeners/upgrade/DistributeRepositoriesActionListener.java
@@ -124,8 +124,16 @@ public class DistributeRepositoriesActionListener {
         if (null != repoVersion && StringUtils.isNotBlank(actualVersion)) {
           if (!StringUtils.equals(repoVersion.getVersion(), actualVersion)) {
             repoVersion.setVersion(actualVersion);
+            repoVersion.setResolved(true);
             repoVersionDAO.merge(repoVersion);
             repositoryVersion = actualVersion;
+          } else {
+            // the reported versions are the same - we should ensure that the
+            // repo is resolved
+            if (!repoVersion.isResolved()) {
+              repoVersion.setResolved(true);
+              repoVersionDAO.merge(repoVersion);
+            }
           }
         }
       }

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/main/java/org/apache/ambari/server/events/listeners/upgrade/StackVersionListener.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/events/listeners/upgrade/StackVersionListener.java b/ambari-server/src/main/java/org/apache/ambari/server/events/listeners/upgrade/StackVersionListener.java
index 394e9f9..4329cdb 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/events/listeners/upgrade/StackVersionListener.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/events/listeners/upgrade/StackVersionListener.java
@@ -98,8 +98,16 @@ public class StackVersionListener {
       if (null != rve) {
         String currentRepoVersion = rve.getVersion();
         if (!StringUtils.equals(currentRepoVersion, newVersion)) {
-            rve.setVersion(newVersion);
+          rve.setVersion(newVersion);
+          rve.setResolved(true);
+          repositoryVersionDAO.merge(rve);
+        } else {
+          // the reported versions are the same - we should ensure that the repo
+          // is resolved
+          if (!rve.isResolved()) {
+            rve.setResolved(true);
             repositoryVersionDAO.merge(rve);
+          }
         }
       }
     }
@@ -236,4 +244,4 @@ public class StackVersionListener {
     sch.setUpgradeState(upgradeState);
     sch.recalculateHostVersionState();
   }
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/main/java/org/apache/ambari/server/metadata/CachedRoleCommandOrderProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/metadata/CachedRoleCommandOrderProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/metadata/CachedRoleCommandOrderProvider.java
index 1ef415a..096a395 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/metadata/CachedRoleCommandOrderProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/metadata/CachedRoleCommandOrderProvider.java
@@ -31,11 +31,13 @@ import org.slf4j.LoggerFactory;
 
 import com.google.inject.Inject;
 import com.google.inject.Injector;
+import com.google.inject.Singleton;
 
 /**
  * RoleCommandOrderProvider which caches RoleCommandOrder objects for a cluster to avoid the cost of construction of
  * RoleCommandOrder objects each time.
  */
+@Singleton
 public class CachedRoleCommandOrderProvider implements RoleCommandOrderProvider {
 
   private static final Logger LOG = LoggerFactory.getLogger(CachedRoleCommandOrderProvider.class);

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/main/java/org/apache/ambari/server/orm/DBAccessorImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/DBAccessorImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/DBAccessorImpl.java
index b137705..26670fc 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/DBAccessorImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/DBAccessorImpl.java
@@ -653,12 +653,10 @@ public class DBAccessorImpl implements DBAccessor {
   }
 
   @Override
-  public void alterColumn(String tableName, DBColumnInfo columnInfo)
-          throws SQLException {
+  public void alterColumn(String tableName, DBColumnInfo columnInfo) throws SQLException {
     //varchar extension only (derby limitation, but not too much for others),
     if (dbmsHelper.supportsColumnTypeChange()) {
-      String statement = dbmsHelper.getAlterColumnStatement(tableName,
-              columnInfo);
+      String statement = dbmsHelper.getAlterColumnStatement(tableName, columnInfo);
       executeQuery(statement);
     } else {
       //use addColumn: add_tmp-update-drop-rename for Derby

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/AlertDefinitionDAO.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/AlertDefinitionDAO.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/AlertDefinitionDAO.java
index 3ca9d3a..227a935 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/AlertDefinitionDAO.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/AlertDefinitionDAO.java
@@ -37,6 +37,7 @@ import org.apache.ambari.server.orm.entities.AlertGroupEntity;
 import org.apache.ambari.server.state.alert.AlertDefinition;
 import org.apache.ambari.server.state.alert.AlertDefinitionFactory;
 import org.apache.ambari.server.state.alert.Scope;
+import org.apache.ambari.server.state.alert.SourceType;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -308,6 +309,18 @@ public class AlertDefinitionDAO {
   }
 
   /**
+   * @return all definitions with the given sourceType
+   */
+  @RequiresSession
+  public List<AlertDefinitionEntity> findBySourceType(Long clusterId, SourceType sourceType) {
+    return daoUtils.selectList(
+      entityManagerProvider.get()
+        .createNamedQuery("AlertDefinitionEntity.findBySourceType", AlertDefinitionEntity.class)
+        .setParameter("clusterId", clusterId)
+        .setParameter("sourceType", sourceType));
+  }
+
+  /**
    * Persists a new alert definition, also creating the associated
    * {@link AlertGroupEntity} relationship for the definition's service default
    * group. Fires an {@link AlertDefinitionRegistrationEvent}.
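
A hypothetical call site for the new finder, assuming an injected
AlertDefinitionDAO and a SourceType value such as SCRIPT from the imported
enum:

    List<AlertDefinitionEntity> scriptDefinitions =
        alertDefinitionDAO.findBySourceType(clusterId, SourceType.SCRIPT);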

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/ClusterDAO.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/ClusterDAO.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/ClusterDAO.java
index d0f8d0b..a1b6fbe 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/ClusterDAO.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/ClusterDAO.java
@@ -356,9 +356,29 @@ public class ClusterDAO {
    *          the entity to merge (not {@code null}).
    * @return the managed entity which was merged (never {@code null}).
    */
+  @Transactional
   public ClusterConfigEntity merge(ClusterConfigEntity clusterConfigEntity) {
+    return merge(clusterConfigEntity, false);
+  }
+
+  /**
+   * Merge the specified entity into the current persistence context.
+   *
+   * @param clusterConfigEntity
+   *          the entity to merge (not {@code null}).
+   * @param flush
+   *          if {@code true} then {@link EntityManager#flush()} will be invoked
+   *          immediately after the merge.
+   * @return the managed entity which was merged (never {@code null}).
+   */
+  @Transactional
+  public ClusterConfigEntity merge(ClusterConfigEntity clusterConfigEntity, boolean flush) {
     EntityManager entityManager = entityManagerProvider.get();
-    return entityManager.merge(clusterConfigEntity);
+    ClusterConfigEntity clusterConfigEntityRes = entityManager.merge(clusterConfigEntity);
+    if (flush) {
+      entityManager.flush();
+    }
+    return clusterConfigEntityRes;
   }
 
   @Transactional
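
JPA defers SQL until the transaction commits (or the provider decides to
flush), so a row merged through the one-argument overload may not yet be
visible to queries issued later in the same transaction. Passing flush=true
forces EntityManager.flush() immediately after the merge. A hedged usage
sketch:

    // flush so a subsequent query in the same transaction sees the merged
    // row, at the cost of an earlier write to the database
    ClusterConfigEntity managed = clusterDAO.merge(configEntity, true);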

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/HostRoleCommandDAO.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/HostRoleCommandDAO.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/HostRoleCommandDAO.java
index f634d89..010ccec 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/HostRoleCommandDAO.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/HostRoleCommandDAO.java
@@ -39,6 +39,7 @@ import javax.persistence.metamodel.SingularAttribute;
 import org.apache.ambari.annotations.TransactionalLock;
 import org.apache.ambari.annotations.TransactionalLock.LockArea;
 import org.apache.ambari.annotations.TransactionalLock.LockType;
+import org.apache.ambari.server.Role;
 import org.apache.ambari.server.RoleCommand;
 import org.apache.ambari.server.actionmanager.HostRoleCommand;
 import org.apache.ambari.server.actionmanager.HostRoleCommandFactory;
@@ -964,6 +965,24 @@ public class HostRoleCommandDAO {
   }
 
   /**
+   * Gets the most recently run service check grouped by the command's role
+   * (which is the only way to identify the service it was for!?)
+   *
+   * @param clusterId
+   *          the ID of the cluster to get the service checks for.
+   */
+  @RequiresSession
+  public List<LastServiceCheckDTO> getLatestServiceChecksByRole(long clusterId) {
+    TypedQuery<LastServiceCheckDTO> query = entityManagerProvider.get().createNamedQuery(
+        "HostRoleCommandEntity.findLatestServiceChecksByRole", LastServiceCheckDTO.class);
+
+    query.setParameter("clusterId", clusterId);
+    query.setParameter("roleCommand", RoleCommand.SERVICE_CHECK);
+
+    return daoUtils.selectList(query);
+  }
+
+  /**
    * The {@link HostRoleCommandPredicateVisitor} is used to convert an Ambari
    * {@link Predicate} into a JPA {@link javax.persistence.criteria.Predicate}.
    */
@@ -1010,4 +1029,32 @@ public class HostRoleCommandDAO {
 
     return Sets.newHashSet(taskIds);
   }
+
+  /**
+   * A simple DTO for storing the most recent service check time for a given
+   * {@link Role}.
+   */
+  public static class LastServiceCheckDTO {
+
+    /**
+     * The role.
+     */
+    public final String role;
+
+    /**
+     * The time that the service check ended.
+     */
+    public final long endTime;
+
+    /**
+     * Constructor.
+     *
+     * @param role the role (command name) the service check ran as
+     * @param endTime the time at which the service check ended
+     */
+    public LastServiceCheckDTO(String role, long endTime) {
+      this.role = role;
+      this.endTime = endTime;
+    }
+  }
 }
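
The DTO is populated by the "SELECT NEW ..." JPQL constructor expression in
the HostRoleCommandEntity.findLatestServiceChecksByRole named query (shown in
a later hunk of this commit). A hypothetical caller, assuming an injected
HostRoleCommandDAO and that endTime is epoch milliseconds:

    List<HostRoleCommandDAO.LastServiceCheckDTO> lastChecks =
        hostRoleCommandDAO.getLatestServiceChecksByRole(clusterId);
    for (HostRoleCommandDAO.LastServiceCheckDTO check : lastChecks) {
      System.out.println(check.role + " last ran at " + check.endTime);
    }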

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/ServiceConfigDAO.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/ServiceConfigDAO.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/ServiceConfigDAO.java
index 94f5fa5..978540a 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/ServiceConfigDAO.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/ServiceConfigDAO.java
@@ -197,7 +197,7 @@ public class ServiceConfigDAO {
         stackId.getStackVersion());
 
     TypedQuery<ServiceConfigEntity> query = entityManagerProvider.get().createNamedQuery(
-        "ServiceConfigEntity.findServiceConfigsByStack",
+        "ServiceConfigEntity.findAllServiceConfigsByStack",
         ServiceConfigEntity.class);
 
     query.setParameter("clusterId", clusterId);

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/AlertDefinitionEntity.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/AlertDefinitionEntity.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/AlertDefinitionEntity.java
index de203be..2f4941b 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/AlertDefinitionEntity.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/AlertDefinitionEntity.java
@@ -72,8 +72,8 @@ import org.apache.ambari.server.state.alert.SourceType;
   @NamedQuery(name = "AlertDefinitionEntity.findByService", query = "SELECT ad FROM AlertDefinitionEntity ad WHERE ad.serviceName = :serviceName AND ad.clusterId = :clusterId"),
   @NamedQuery(name = "AlertDefinitionEntity.findByServiceAndComponent", query = "SELECT ad FROM AlertDefinitionEntity ad WHERE ad.serviceName = :serviceName AND ad.componentName = :componentName AND ad.clusterId = :clusterId"),
   @NamedQuery(name = "AlertDefinitionEntity.findByServiceMaster", query = "SELECT ad FROM AlertDefinitionEntity ad WHERE ad.serviceName IN :services AND ad.scope = :scope AND ad.clusterId = :clusterId AND ad.componentName IS NULL"),
-  @NamedQuery(name = "AlertDefinitionEntity.findByIds", query = "SELECT ad FROM AlertDefinitionEntity ad WHERE ad.definitionId IN :definitionIds")})
-
+  @NamedQuery(name = "AlertDefinitionEntity.findByIds", query = "SELECT ad FROM AlertDefinitionEntity ad WHERE ad.definitionId IN :definitionIds"),
+  @NamedQuery(name = "AlertDefinitionEntity.findBySourceType", query = "SELECT ad FROM AlertDefinitionEntity ad WHERE ad.clusterId = :clusterId AND ad.sourceType = :sourceType")})
 public class AlertDefinitionEntity {
 
   @Id

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/HostRoleCommandEntity.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/HostRoleCommandEntity.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/HostRoleCommandEntity.java
index bfc83ca..0eea7e6 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/HostRoleCommandEntity.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/HostRoleCommandEntity.java
@@ -60,22 +60,54 @@ import org.apache.commons.lang.ArrayUtils;
     , initialValue = 1
 )
 @NamedQueries({
-    @NamedQuery(name = "HostRoleCommandEntity.findTaskIdsByRequestStageIds", query = "SELECT command.taskId FROM HostRoleCommandEntity command WHERE command.stageId = :stageId AND command.requestId = :requestId"),
-    @NamedQuery(name = "HostRoleCommandEntity.findCountByCommandStatuses", query = "SELECT COUNT(command.taskId) FROM HostRoleCommandEntity command WHERE command.status IN :statuses"),
-    @NamedQuery(name = "HostRoleCommandEntity.findByRequestIdAndStatuses", query="SELECT task FROM HostRoleCommandEntity task WHERE task.requestId=:requestId AND task.status IN :statuses ORDER BY task.taskId ASC"),
-    @NamedQuery(name = "HostRoleCommandEntity.findTasksByStatusesOrderByIdDesc", query = "SELECT task FROM HostRoleCommandEntity task WHERE task.requestId = :requestId AND task.status IN :statuses ORDER BY task.taskId DESC"),
-    @NamedQuery(name = "HostRoleCommandEntity.findNumTasksAlreadyRanInStage", query = "SELECT COUNT(task.taskId) FROM HostRoleCommandEntity task WHERE task.requestId = :requestId AND task.taskId > :taskId AND task.stageId > :stageId AND task.status NOT IN :statuses"),
-    @NamedQuery(name = "HostRoleCommandEntity.findByCommandStatuses", query = "SELECT command FROM HostRoleCommandEntity command WHERE command.status IN :statuses ORDER BY command.requestId, command.stageId"),
-    @NamedQuery(name = "HostRoleCommandEntity.findByHostId", query = "SELECT command FROM HostRoleCommandEntity command WHERE command.hostId=:hostId"),
-    @NamedQuery(name = "HostRoleCommandEntity.findByHostRole", query = "SELECT command FROM HostRoleCommandEntity command WHERE command.hostEntity.hostName=:hostName AND command.requestId=:requestId AND command.stageId=:stageId AND command.role=:role ORDER BY command.taskId"),
-    @NamedQuery(name = "HostRoleCommandEntity.findByHostRoleNullHost", query = "SELECT command FROM HostRoleCommandEntity command WHERE command.hostEntity IS NULL AND command.requestId=:requestId AND command.stageId=:stageId AND command.role=:role"),
-    @NamedQuery(name = "HostRoleCommandEntity.findByStatusBetweenStages", query = "SELECT command FROM HostRoleCommandEntity command WHERE command.requestId = :requestId AND command.stageId >= :minStageId AND command.stageId <= :maxStageId AND command.status = :status"),
-    @NamedQuery(name = "HostRoleCommandEntity.updateAutoSkipExcludeRoleCommand", query = "UPDATE HostRoleCommandEntity command SET command.autoSkipOnFailure = :autoSkipOnFailure WHERE command.requestId = :requestId AND command.roleCommand <> :roleCommand"),
-    @NamedQuery(name = "HostRoleCommandEntity.updateAutoSkipForRoleCommand", query = "UPDATE HostRoleCommandEntity command SET command.autoSkipOnFailure = :autoSkipOnFailure WHERE command.requestId = :requestId AND command.roleCommand = :roleCommand"),
-    @NamedQuery(name = "HostRoleCommandEntity.removeByTaskIds", query = "DELETE FROM HostRoleCommandEntity command WHERE command.taskId IN :taskIds"),
-    @NamedQuery(name = "HostRoleCommandEntity.findHostsByCommandStatus", query = "SELECT DISTINCT(host.hostName) FROM HostRoleCommandEntity command, HostEntity host WHERE (command.requestId >= :iLowestRequestIdInProgress AND command.requestId <= :iHighestRequestIdInProgress) AND command.status IN :statuses AND command.hostId = host.hostId AND host.hostName IS NOT NULL"),
-    @NamedQuery(name = "HostRoleCommandEntity.getBlockingHostsForRequest", query = "SELECT DISTINCT(host.hostName) FROM HostRoleCommandEntity command, HostEntity host WHERE command.requestId >= :lowerRequestIdInclusive AND command.requestId < :upperRequestIdExclusive AND command.status IN :statuses AND command.isBackgroundCommand=0 AND command.hostId = host.hostId AND host.hostName IS NOT NULL")
-
+    @NamedQuery(
+        name = "HostRoleCommandEntity.findTaskIdsByRequestStageIds",
+        query = "SELECT command.taskId FROM HostRoleCommandEntity command WHERE command.stageId = :stageId AND command.requestId = :requestId"),
+    @NamedQuery(
+        name = "HostRoleCommandEntity.findCountByCommandStatuses",
+        query = "SELECT COUNT(command.taskId) FROM HostRoleCommandEntity command WHERE command.status IN :statuses"),
+    @NamedQuery(
+        name = "HostRoleCommandEntity.findByRequestIdAndStatuses",
+        query = "SELECT task FROM HostRoleCommandEntity task WHERE task.requestId=:requestId AND task.status IN :statuses ORDER BY task.taskId ASC"),
+    @NamedQuery(
+        name = "HostRoleCommandEntity.findTasksByStatusesOrderByIdDesc",
+        query = "SELECT task FROM HostRoleCommandEntity task WHERE task.requestId = :requestId AND task.status IN :statuses ORDER BY task.taskId DESC"),
+    @NamedQuery(
+        name = "HostRoleCommandEntity.findNumTasksAlreadyRanInStage",
+        query = "SELECT COUNT(task.taskId) FROM HostRoleCommandEntity task WHERE task.requestId = :requestId AND task.taskId > :taskId AND task.stageId > :stageId AND task.status NOT IN :statuses"),
+    @NamedQuery(
+        name = "HostRoleCommandEntity.findByCommandStatuses",
+        query = "SELECT command FROM HostRoleCommandEntity command WHERE command.status IN :statuses ORDER BY command.requestId, command.stageId"),
+    @NamedQuery(
+        name = "HostRoleCommandEntity.findByHostId",
+        query = "SELECT command FROM HostRoleCommandEntity command WHERE command.hostId=:hostId"),
+    @NamedQuery(
+        name = "HostRoleCommandEntity.findByHostRole",
+        query = "SELECT command FROM HostRoleCommandEntity command WHERE command.hostEntity.hostName=:hostName AND command.requestId=:requestId AND command.stageId=:stageId AND command.role=:role ORDER BY command.taskId"),
+    @NamedQuery(
+        name = "HostRoleCommandEntity.findByHostRoleNullHost",
+        query = "SELECT command FROM HostRoleCommandEntity command WHERE command.hostEntity IS NULL AND command.requestId=:requestId AND command.stageId=:stageId AND command.role=:role"),
+    @NamedQuery(
+        name = "HostRoleCommandEntity.findByStatusBetweenStages",
+        query = "SELECT command FROM HostRoleCommandEntity command WHERE command.requestId = :requestId AND command.stageId >= :minStageId AND command.stageId <= :maxStageId AND command.status = :status"),
+    @NamedQuery(
+        name = "HostRoleCommandEntity.updateAutoSkipExcludeRoleCommand",
+        query = "UPDATE HostRoleCommandEntity command SET command.autoSkipOnFailure = :autoSkipOnFailure WHERE command.requestId = :requestId AND command.roleCommand <> :roleCommand"),
+    @NamedQuery(
+        name = "HostRoleCommandEntity.updateAutoSkipForRoleCommand",
+        query = "UPDATE HostRoleCommandEntity command SET command.autoSkipOnFailure = :autoSkipOnFailure WHERE command.requestId = :requestId AND command.roleCommand = :roleCommand"),
+    @NamedQuery(
+        name = "HostRoleCommandEntity.removeByTaskIds",
+        query = "DELETE FROM HostRoleCommandEntity command WHERE command.taskId IN :taskIds"),
+    @NamedQuery(
+        name = "HostRoleCommandEntity.findHostsByCommandStatus",
+        query = "SELECT DISTINCT(host.hostName) FROM HostRoleCommandEntity command, HostEntity host WHERE (command.requestId >= :iLowestRequestIdInProgress AND command.requestId <= :iHighestRequestIdInProgress) AND command.status IN :statuses AND command.hostId = host.hostId AND host.hostName IS NOT NULL"),
+    @NamedQuery(
+        name = "HostRoleCommandEntity.getBlockingHostsForRequest",
+        query = "SELECT DISTINCT(host.hostName) FROM HostRoleCommandEntity command, HostEntity host WHERE command.requestId >= :lowerRequestIdInclusive AND command.requestId < :upperRequestIdExclusive AND command.status IN :statuses AND command.isBackgroundCommand=0 AND command.hostId = host.hostId AND host.hostName IS NOT NULL"),
+    @NamedQuery(
+        name = "HostRoleCommandEntity.findLatestServiceChecksByRole",
+        query = "SELECT NEW org.apache.ambari.server.orm.dao.HostRoleCommandDAO.LastServiceCheckDTO(command.role, MAX(command.endTime)) FROM HostRoleCommandEntity command WHERE command.roleCommand = :roleCommand AND command.endTime > 0 AND command.stage.clusterId = :clusterId GROUP BY command.role ORDER BY command.role ASC")
 })
 public class HostRoleCommandEntity {
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/RepositoryEntity.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/RepositoryEntity.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/RepositoryEntity.java
index bad8195..6d7498b 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/RepositoryEntity.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/RepositoryEntity.java
@@ -23,6 +23,8 @@ package org.apache.ambari.server.orm.entities;
 public class RepositoryEntity {
 
   private String name;
+  private String distribution;
+  private String components;
   private String baseUrl;
   private String repositoryId;
   private String mirrorsList;
@@ -36,6 +38,22 @@ public class RepositoryEntity {
     this.name = name;
   }
 
+  public String getDistribution() {
+    return distribution;
+  }
+
+  public void setDistribution(String distribution) {
+    this.distribution = distribution;
+  }
+
+  public String getComponents() {
+    return components;
+  }
+
+  public void setComponents(String components) {
+    this.components = components;
+  }
+
   public String getBaseUrl() {
     return baseUrl;
   }
@@ -60,6 +78,8 @@ public class RepositoryEntity {
     RepositoryEntity that = (RepositoryEntity) o;
 
     if (name != null ? !name.equals(that.name) : that.name != null) return false;
+    if (distribution != null ? !distribution.equals(that.distribution) : that.distribution != null) return false;
+    if (components != null ? !components.equals(that.components) : that.components != null) return false;
     if (baseUrl != null ? !baseUrl.equals(that.baseUrl) : that.baseUrl != null) return false;
     if (repositoryId != null ? !repositoryId.equals(that.repositoryId) : that.repositoryId != null) return false;
 
@@ -69,6 +89,8 @@ public class RepositoryEntity {
   @Override
   public int hashCode() {
     int result = name != null ? name.hashCode() : 0;
+    result = 31 * result + (distribution != null ? distribution.hashCode() : 0);
+    result = 31 * result + (components != null ? components.hashCode() : 0);
     result = 31 * result + (baseUrl != null ? baseUrl.hashCode() : 0);
     result = 31 * result + (repositoryId != null ? repositoryId.hashCode() : 0);
     return result;

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/RepositoryVersionEntity.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/RepositoryVersionEntity.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/RepositoryVersionEntity.java
index bf9e678..2b56b11 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/RepositoryVersionEntity.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/RepositoryVersionEntity.java
@@ -147,6 +147,16 @@ public class RepositoryVersionEntity {
   @Column(name = "hidden", nullable = false, insertable = true, updatable = true)
   private short isHidden = 0;
 
+  /**
+   * Repositories can't be trusted until they have been deployed and we've
+   * detected their actual version. Most of the time, things match up, but
+   * editing a VDF could causes the version to be misrepresented. Once we have
+   * received the correct version of the repository (normally after it's been
+   * installed), then we can set this flag to {@code true}.
+   */
+  @Column(name = "resolved", nullable = false)
+  private short resolved = 0;
+
   @ManyToOne
   @JoinColumn(name = "parent_id")
   private RepositoryVersionEntity parent;
@@ -217,6 +227,13 @@ public class RepositoryVersionEntity {
     return version;
   }
 
+  /**
+   * Sets the version on this repository version entity. If the version is
+   * confirmed as correct, then the caller should also set
+   * {@link #setResolved(boolean)}.
+   *
+   * @param version the repository version string
+   */
   public void setVersion(String version) {
     this.version = version;
   }
@@ -459,4 +476,24 @@ public class RepositoryVersionEntity {
     this.isHidden = (short) (isHidden ? 1 : 0);
   }
 
+  /**
+   * Gets whether this repository has been installed and has reported back its
+   * actual version.
+   *
+   * @return {@code true} if the version for this repository can be trusted,
+   *         {@code false} otherwise.
+   */
+  public boolean isResolved() {
+    return resolved == 1;
+  }
+
+  /**
+   * Sets whether this repository has been installed and has reported back its
+   * actual version.
+   *
+   * @param resolved {@code true} if the reported version can be trusted
+   */
+  public void setResolved(boolean resolved) {
+    this.resolved = resolved ? (short) 1 : (short) 0;
+  }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/UpgradeItemEntity.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/UpgradeItemEntity.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/UpgradeItemEntity.java
index 7f0408f..0c254ef 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/UpgradeItemEntity.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/UpgradeItemEntity.java
@@ -76,7 +76,7 @@ public class UpgradeItemEntity {
   private String tasks = null;
 
   @Basic
-  @Column(name = "item_text", length = 1024)
+  @Column(name = "item_text")
   private String itemText = null;
 
   @Basic

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/main/java/org/apache/ambari/server/scheduler/ExecutionScheduleManager.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/scheduler/ExecutionScheduleManager.java b/ambari-server/src/main/java/org/apache/ambari/server/scheduler/ExecutionScheduleManager.java
index 95fc973..e804961 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/scheduler/ExecutionScheduleManager.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/scheduler/ExecutionScheduleManager.java
@@ -35,6 +35,7 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.ListIterator;
 import java.util.Map;
+import java.util.regex.Pattern;
 
 import javax.net.ssl.HostnameVerifier;
 import javax.net.ssl.SSLContext;
@@ -116,6 +117,8 @@ public class ExecutionScheduleManager {
   protected static final String REQUESTS_TIMEDOUT_TASKS_KEY = "timed_out_task_count";
   protected static final String REQUESTS_TOTAL_TASKS_KEY = "task_count";
 
+  protected static final Pattern CONTAINS_API_VERSION_PATTERN = Pattern.compile("^/?" + DEFAULT_API_PATH + ".*");
+
   @Inject
   public ExecutionScheduleManager(Configuration configuration,
                                   ExecutionScheduler executionScheduler,
@@ -663,7 +666,7 @@ public class ExecutionScheduleManager {
   }
 
   protected BatchRequestResponse performApiGetRequest(String relativeUri, boolean queryAllFields) {
-    WebResource webResource = ambariWebResource.path(completeRelativeUri(relativeUri));
+    WebResource webResource = extendApiResource(ambariWebResource, relativeUri);
     if (queryAllFields) {
       webResource = webResource.queryParam("fields", "*");
     }
@@ -679,7 +682,8 @@ public class ExecutionScheduleManager {
   protected BatchRequestResponse performApiRequest(String relativeUri, String body, String method, Integer userId) {
     ClientResponse response;
     try {
-      response = ambariWebResource.path(completeRelativeUri(relativeUri)).header(USER_ID_HEADER, userId).method(method, ClientResponse.class, body);
+      response = extendApiResource(ambariWebResource, relativeUri)
+          .header(USER_ID_HEADER, userId).method(method, ClientResponse.class, body);
     } catch (UniformInterfaceException e) {
       response = e.getResponse();
     }
@@ -796,15 +800,18 @@ public class ExecutionScheduleManager {
     }
   }
 
-  private String completeRelativeUri(String relativeUri){
-    if (StringUtils.isNotEmpty(relativeUri)
-        && !(relativeUri.startsWith("api/v1") || relativeUri.startsWith("/api/v1"))){
-      if (relativeUri.charAt(0) != '/') {
-        relativeUri = '/' + relativeUri;
-      }
-      return "api/v1" + relativeUri;
+  /**
+   * Returns the web resource extended with {@link #DEFAULT_API_PATH} when the relative URI lacks the API version path.
+   * @param webResource Ambari WebResource as provided by the client {@link #ambariWebResource}
+   * @param relativeUri relative request URI
+   * @return the extended WebResource
+   */
+  protected WebResource extendApiResource(WebResource webResource, String relativeUri) {
+    WebResource result = webResource;
+    if (StringUtils.isNotEmpty(relativeUri) && !CONTAINS_API_VERSION_PATTERN.matcher(relativeUri).matches()) {
+      result = webResource.path(DEFAULT_API_PATH);
     }
-    return relativeUri;
+    return result.path(relativeUri);
   }
 }
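
A standalone sketch of the path normalization above, assuming DEFAULT_API_PATH is "api/v1" (as in the removed completeRelativeUri) and simplifying the Jersey WebResource plumbing to plain strings:

  import java.util.regex.Pattern;

  class ApiPathSketch {
    static final String DEFAULT_API_PATH = "api/v1"; // assumed value
    static final Pattern CONTAINS_API_VERSION_PATTERN = Pattern.compile("^/?" + DEFAULT_API_PATH + ".*");

    // mirrors extendApiResource: prepend the API path only when the URI lacks it
    static String extend(String relativeUri) {
      String base = "";
      if (relativeUri != null && !relativeUri.isEmpty()
          && !CONTAINS_API_VERSION_PATTERN.matcher(relativeUri).matches()) {
        base = DEFAULT_API_PATH + "/";
      }
      return base + relativeUri;
    }

    public static void main(String[] args) {
      System.out.println(extend("clusters/c1"));      // api/v1/clusters/c1
      System.out.println(extend("/api/v1/clusters")); // /api/v1/clusters (left as-is)
    }
  }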
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/AbstractPrepareKerberosServerAction.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/AbstractPrepareKerberosServerAction.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/AbstractPrepareKerberosServerAction.java
index d6b8ffc..3db844a 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/AbstractPrepareKerberosServerAction.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/AbstractPrepareKerberosServerAction.java
@@ -76,12 +76,13 @@ public abstract class AbstractPrepareKerberosServerAction extends KerberosServer
                                     Map<String, Map<String, String>> currentConfigurations,
                                     Map<String, Map<String, String>> kerberosConfigurations,
                                     boolean includeAmbariIdentity,
-                                    Map<String, Set<String>> propertiesToBeIgnored) throws AmbariException {
+                                    Map<String, Set<String>> propertiesToBeIgnored,
+                                    boolean excludeHeadless) throws AmbariException {
     List<Component> components = new ArrayList<>();
     for (ServiceComponentHost each : schToProcess) {
       components.add(Component.fromServiceComponentHost(each));
     }
-    processServiceComponents(cluster, kerberosDescriptor, components, identityFilter, dataDirectory, currentConfigurations, kerberosConfigurations, includeAmbariIdentity, propertiesToBeIgnored);
+    processServiceComponents(cluster, kerberosDescriptor, components, identityFilter, dataDirectory, currentConfigurations, kerberosConfigurations, includeAmbariIdentity, propertiesToBeIgnored, excludeHeadless);
   }
 
   protected void processServiceComponents(Cluster cluster, KerberosDescriptor kerberosDescriptor,
@@ -90,7 +91,8 @@ public abstract class AbstractPrepareKerberosServerAction extends KerberosServer
                                           Map<String, Map<String, String>> currentConfigurations,
                                           Map<String, Map<String, String>> kerberosConfigurations,
                                           boolean includeAmbariIdentity,
-                                          Map<String, Set<String>> propertiesToBeIgnored) throws AmbariException {
+                                          Map<String, Set<String>> propertiesToBeIgnored,
+                                          boolean excludeHeadless) throws AmbariException {
 
     actionLog.writeStdOut("Processing Kerberos identities and configurations");
 
@@ -141,7 +143,7 @@ public abstract class AbstractPrepareKerberosServerAction extends KerberosServer
 
             // Add service-level principals (and keytabs)
             kerberosHelper.addIdentities(kerberosIdentityDataFileWriter, serviceIdentities,
-                identityFilter, hostName, serviceName, componentName, kerberosConfigurations, currentConfigurations);
+                identityFilter, hostName, serviceName, componentName, kerberosConfigurations, currentConfigurations, excludeHeadless);
             propertiesToIgnore = gatherPropertiesToIgnore(serviceIdentities, propertiesToIgnore);
 
             KerberosComponentDescriptor componentDescriptor = serviceDescriptor.getComponent(componentName);
@@ -156,7 +158,7 @@ public abstract class AbstractPrepareKerberosServerAction extends KerberosServer
 
               // Add component-level principals (and keytabs)
               kerberosHelper.addIdentities(kerberosIdentityDataFileWriter, componentIdentities,
-                  identityFilter, hostName, serviceName, componentName, kerberosConfigurations, currentConfigurations);
+                  identityFilter, hostName, serviceName, componentName, kerberosConfigurations, currentConfigurations, excludeHeadless);
               propertiesToIgnore = gatherPropertiesToIgnore(componentIdentities, propertiesToIgnore);
             }
           }
@@ -177,7 +179,7 @@ public abstract class AbstractPrepareKerberosServerAction extends KerberosServer
 
               List<KerberosIdentityDescriptor> componentIdentities = Collections.singletonList(identity);
               kerberosHelper.addIdentities(kerberosIdentityDataFileWriter, componentIdentities,
-                  identityFilter, KerberosHelper.AMBARI_SERVER_HOST_NAME, "AMBARI", componentName, kerberosConfigurations, currentConfigurations);
+                  identityFilter, KerberosHelper.AMBARI_SERVER_HOST_NAME, "AMBARI", componentName, kerberosConfigurations, currentConfigurations, excludeHeadless);
               propertiesToIgnore = gatherPropertiesToIgnore(componentIdentities, propertiesToIgnore);
             }
           }

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/CreateKeytabFilesServerAction.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/CreateKeytabFilesServerAction.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/CreateKeytabFilesServerAction.java
index a23ab5d..4396a2b 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/CreateKeytabFilesServerAction.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/CreateKeytabFilesServerAction.java
@@ -217,9 +217,14 @@ public class CreateKeytabFilesServerAction extends KerberosServerAction {
                 return commandReport;
               }
 
+              boolean regenerateKeytabs = "true".equalsIgnoreCase(getCommandParameterValue(getCommandParameters(), REGENERATE_ALL));
+              boolean onlyKeytabWrite = "true".equalsIgnoreCase(identityRecord.get(KerberosIdentityDataFileReader.ONLY_KEYTAB_WRITE));
+              boolean grabKeytabFromCache = regenerateKeytabs && onlyKeytabWrite;
+              // if grabKeytabFromCache is true, we will try to get the keytab from the cache and send it to the
+              // agent; this is the case for headless cached keytabs
               if (password == null) {
-                if (hostName.equalsIgnoreCase(KerberosHelper.AMBARI_SERVER_HOST_NAME) || kerberosPrincipalHostDAO
-                  .exists(evaluatedPrincipal, hostEntity.getHostId())) {
+                if (!grabKeytabFromCache && (hostName.equalsIgnoreCase(KerberosHelper.AMBARI_SERVER_HOST_NAME) || kerberosPrincipalHostDAO
+                  .exists(evaluatedPrincipal, hostEntity.getHostId()))) {
                   // There is nothing to do for this since it must already exist and we don't want to
                   // regenerate the keytab
                   message = String.format("Skipping keytab file for %s, missing password indicates nothing to do", evaluatedPrincipal);
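
A condensed sketch of the new decision on the password == null branch above (the method name and boolean parameters are illustrative):

  static boolean skipExistingKeytab(boolean regenerateKeytabs, boolean onlyKeytabWrite,
                                    boolean isAmbariServerHost, boolean principalExistsOnHost) {
    // headless cached keytabs: when regenerating everything and the record is
    // keytab-write-only, pull the keytab from the cache instead of skipping
    boolean grabKeytabFromCache = regenerateKeytabs && onlyKeytabWrite;
    return !grabKeytabFromCache && (isAmbariServerHost || principalExistsOnHost);
  }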

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/KerberosIdentityDataFile.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/KerberosIdentityDataFile.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/KerberosIdentityDataFile.java
index 81e345a..ddf3d1b 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/KerberosIdentityDataFile.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/KerberosIdentityDataFile.java
@@ -36,6 +36,6 @@ public interface KerberosIdentityDataFile extends KerberosDataFile {
   String KEYTAB_FILE_GROUP_NAME = "keytab_file_group_name";
   String KEYTAB_FILE_GROUP_ACCESS = "keytab_file_group_access";
   String KEYTAB_FILE_IS_CACHABLE = "keytab_file_is_cachable";
-
+  String ONLY_KEYTAB_WRITE = "only_keytab_write";
 
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/KerberosIdentityDataFileWriter.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/KerberosIdentityDataFileWriter.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/KerberosIdentityDataFileWriter.java
index f55c6f4..ea742bd 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/KerberosIdentityDataFileWriter.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/KerberosIdentityDataFileWriter.java
@@ -68,7 +68,8 @@ public class KerberosIdentityDataFileWriter extends AbstractKerberosDataFileWrit
                           String principal, String principalType,
                           String keytabFilePath, String keytabFileOwnerName,
                           String keytabFileOwnerAccess, String keytabFileGroupName,
-                          String keytabFileGroupAccess, String keytabFileCanCache)
+                          String keytabFileGroupAccess, String keytabFileCanCache,
+                          String onlyKeytabWrite)
       throws IOException {
     super.appendRecord(hostName,
         serviceName,
@@ -80,7 +81,8 @@ public class KerberosIdentityDataFileWriter extends AbstractKerberosDataFileWrit
         keytabFileOwnerAccess,
         keytabFileGroupName,
         keytabFileGroupAccess,
-        keytabFileCanCache);
+        keytabFileCanCache,
+        onlyKeytabWrite);
   }
 
   @Override
@@ -95,6 +97,7 @@ public class KerberosIdentityDataFileWriter extends AbstractKerberosDataFileWrit
         KEYTAB_FILE_OWNER_ACCESS,
         KEYTAB_FILE_GROUP_NAME,
         KEYTAB_FILE_GROUP_ACCESS,
-        KEYTAB_FILE_IS_CACHABLE);
+        KEYTAB_FILE_IS_CACHABLE,
+        ONLY_KEYTAB_WRITE);
   }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/PrepareDisableKerberosServerAction.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/PrepareDisableKerberosServerAction.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/PrepareDisableKerberosServerAction.java
index 4e63f4a..f56e946 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/PrepareDisableKerberosServerAction.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/PrepareDisableKerberosServerAction.java
@@ -108,7 +108,7 @@ public class PrepareDisableKerberosServerAction extends AbstractPrepareKerberosS
     Map<String, Map<String, String>> configurations = kerberosHelper.calculateConfigurations(cluster, null, kerberosDescriptor, false, false);
 
     processServiceComponentHosts(cluster, kerberosDescriptor, schToProcess, identityFilter, dataDirectory,
-        configurations, kerberosConfigurations, includeAmbariIdentity, propertiesToIgnore);
+        configurations, kerberosConfigurations, includeAmbariIdentity, propertiesToIgnore, false);
 
     // Add auth-to-local configurations to the set of changes
     Map<String, Set<String>> authToLocalProperties = kerberosHelper.translateConfigurationSpecifications(kerberosDescriptor.getAllAuthToLocalProperties());

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/PrepareEnableKerberosServerAction.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/PrepareEnableKerberosServerAction.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/PrepareEnableKerberosServerAction.java
index e13f033..3ec84fa 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/PrepareEnableKerberosServerAction.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/PrepareEnableKerberosServerAction.java
@@ -117,7 +117,7 @@ public class PrepareEnableKerberosServerAction extends PrepareKerberosIdentities
     Map<String, Map<String, String>> configurations = kerberosHelper.calculateConfigurations(cluster, null, kerberosDescriptor, false, false);
 
     processServiceComponentHosts(cluster, kerberosDescriptor, schToProcess, identityFilter, dataDirectory,
-        configurations, kerberosConfigurations, true, propertiesToIgnore);
+        configurations, kerberosConfigurations, true, propertiesToIgnore, false);
 
     // Calculate the set of configurations to update and replace any variables
     // using the previously calculated Map of configurations for the host.

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/PrepareKerberosIdentitiesServerAction.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/PrepareKerberosIdentitiesServerAction.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/PrepareKerberosIdentitiesServerAction.java
index 00c82a5..49828cb 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/PrepareKerberosIdentitiesServerAction.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/PrepareKerberosIdentitiesServerAction.java
@@ -33,6 +33,7 @@ import org.apache.ambari.server.controller.KerberosHelper;
 import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.ServiceComponentHost;
 import org.apache.ambari.server.state.kerberos.KerberosDescriptor;
+import org.apache.commons.collections.CollectionUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -94,7 +95,7 @@ public class PrepareKerberosIdentitiesServerAction extends AbstractPrepareKerber
     Map<String, Map<String, String>> configurations = kerberosHelper.calculateConfigurations(cluster, null, kerberosDescriptor, false, false);
 
     processServiceComponentHosts(cluster, kerberosDescriptor, schToProcess, identityFilter, dataDirectory,
-        configurations, kerberosConfigurations, includeAmbariIdentity, propertiesToIgnore);
+        configurations, kerberosConfigurations, includeAmbariIdentity, propertiesToIgnore, !CollectionUtils.isEmpty(getHostFilter()));
 
     kerberosHelper.applyStackAdvisorUpdates(cluster, services, configurations, kerberosConfigurations,
         propertiesToIgnore, propertiesToRemove, true);
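
In effect (a one-line sketch, assuming getHostFilter() returns the hosts targeted by the request):

  // headless (host-agnostic) identities are excluded whenever a host filter narrows the operation
  boolean excludeHeadless = !CollectionUtils.isEmpty(getHostFilter());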

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/FinalizeUpgradeAction.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/FinalizeUpgradeAction.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/FinalizeUpgradeAction.java
index 5ec0692..451f802 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/FinalizeUpgradeAction.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/FinalizeUpgradeAction.java
@@ -202,6 +202,11 @@ public class FinalizeUpgradeAction extends AbstractUpgradeServerAction {
       // longer used
       finalizeHostRepositoryVersions(cluster);
 
+      if (upgradeContext.getOrchestrationType() == RepositoryType.STANDARD) {
+        outSB.append(String.format("Finalizing the version for cluster %s.\n", cluster.getClusterName()));
+        cluster.setCurrentStackVersion(cluster.getDesiredStackVersion());
+      }
+
       // mark revertable
       if (repositoryType.isRevertable() && direction == Direction.UPGRADE) {
         UpgradeEntity upgrade = cluster.getUpgradeInProgress();

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/PreconfigureKerberosAction.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/PreconfigureKerberosAction.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/PreconfigureKerberosAction.java
index 697f1d1..30bc47f 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/PreconfigureKerberosAction.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/PreconfigureKerberosAction.java
@@ -310,7 +310,7 @@ public class PreconfigureKerberosAction extends AbstractUpgradeServerAction {
 
               // Add service-level principals (and keytabs)
               kerberosHelper.addIdentities(null, serviceIdentities,
-                  null, hostName, serviceName, componentName, kerberosConfigurations, currentConfigurations);
+                  null, hostName, serviceName, componentName, kerberosConfigurations, currentConfigurations, false);
               propertiesToIgnore = gatherPropertiesToIgnore(serviceIdentities, propertiesToIgnore);
 
               KerberosComponentDescriptor componentDescriptor = serviceDescriptor.getComponent(componentName);
@@ -325,7 +325,7 @@ public class PreconfigureKerberosAction extends AbstractUpgradeServerAction {
 
                 // Add component-level principals (and keytabs)
                 kerberosHelper.addIdentities(null, componentIdentities,
-                    null, hostName, serviceName, componentName, kerberosConfigurations, currentConfigurations);
+                    null, hostName, serviceName, componentName, kerberosConfigurations, currentConfigurations, false);
                 propertiesToIgnore = gatherPropertiesToIgnore(componentIdentities, propertiesToIgnore);
               }
             }
@@ -346,7 +346,7 @@ public class PreconfigureKerberosAction extends AbstractUpgradeServerAction {
 
             List<KerberosIdentityDescriptor> componentIdentities = Collections.singletonList(identity);
             kerberosHelper.addIdentities(null, componentIdentities,
-                null, KerberosHelper.AMBARI_SERVER_HOST_NAME, "AMBARI", componentName, kerberosConfigurations, currentConfigurations);
+                null, KerberosHelper.AMBARI_SERVER_HOST_NAME, "AMBARI", componentName, kerberosConfigurations, currentConfigurations, false);
             propertiesToIgnore = gatherPropertiesToIgnore(componentIdentities, propertiesToIgnore);
           }
         }

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/RangerUsersyncConfigCalculation.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/RangerUsersyncConfigCalculation.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/RangerUsersyncConfigCalculation.java
new file mode 100644
index 0000000..67e1dee
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/RangerUsersyncConfigCalculation.java
@@ -0,0 +1,96 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.serveraction.upgrades;
+
+import java.text.MessageFormat;
+import java.util.Map;
+import java.util.concurrent.ConcurrentMap;
+
+import org.apache.ambari.server.AmbariException;
+import org.apache.ambari.server.actionmanager.HostRoleStatus;
+import org.apache.ambari.server.agent.CommandReport;
+import org.apache.ambari.server.serveraction.AbstractServerAction;
+import org.apache.ambari.server.state.Cluster;
+import org.apache.ambari.server.state.Clusters;
+import org.apache.ambari.server.state.Config;
+
+import com.google.inject.Inject;
+
+/**
+ * Computes the Ranger Usersync {@code ranger.usersync.ldap.grouphierarchylevels} property. This class is only
+ * used when upgrading from HDP-2.6.x to HDP-2.6.y.
+ */
+
+public class RangerUsersyncConfigCalculation extends AbstractServerAction {
+  private static final String RANGER_USERSYNC_CONFIG_TYPE = "ranger-ugsync-site";
+  private static final String RANGER_ENV_CONFIG_TYPE = "ranger-env";
+
+  @Inject
+  private Clusters m_clusters;
+
+  @Override
+  public CommandReport execute(ConcurrentMap<String, Object> requestSharedDataContext) throws AmbariException, InterruptedException {
+
+    String clusterName = getExecutionCommand().getClusterName();
+    Cluster cluster = m_clusters.getCluster(clusterName);
+    String outputMsg = "";
+
+    Config rangerUsersyncConfig = cluster.getDesiredConfigByType(RANGER_USERSYNC_CONFIG_TYPE);
+
+    if (null == rangerUsersyncConfig) {
+      return createCommandReport(0, HostRoleStatus.COMPLETED, "{}",
+        MessageFormat.format("Config type {0} not found, skipping updating property in same.", RANGER_USERSYNC_CONFIG_TYPE), "");
+    }
+
+    String ldapGroupHierarchy = "0";
+
+    if (rangerUsersyncConfig.getProperties().containsKey("ranger.usersync.ldap.grouphierarchylevels")) {
+      ldapGroupHierarchy = rangerUsersyncConfig.getProperties().get("ranger.usersync.ldap.grouphierarchylevels");
+    } else {
+      Map<String, String> targetRangerUsersyncConfig = rangerUsersyncConfig.getProperties();
+      targetRangerUsersyncConfig.put("ranger.usersync.ldap.grouphierarchylevels", ldapGroupHierarchy);
+      rangerUsersyncConfig.setProperties(targetRangerUsersyncConfig);
+      rangerUsersyncConfig.save();
+
+      outputMsg = outputMsg + MessageFormat.format("Successfully updated {0} config type.\n", RANGER_USERSYNC_CONFIG_TYPE);
+    }
+
+    Config rangerEnvConfig = cluster.getDesiredConfigByType(RANGER_ENV_CONFIG_TYPE);
+
+    if (null == rangerEnvConfig) {
+      return createCommandReport(0, HostRoleStatus.COMPLETED, "{}",
+        MessageFormat.format("Config type {0} not found, skipping updating property in same.", RANGER_ENV_CONFIG_TYPE), "");
+    }
+
+    String enableSyncNestedGroup = "false";
+
+    if (!ldapGroupHierarchy.equals("0")) {
+      enableSyncNestedGroup = "true";
+    }
+
+    Map<String, String> targetRangerEnvConfig = rangerEnvConfig.getProperties();
+    targetRangerEnvConfig.put("is_nested_groupsync_enabled", enableSyncNestedGroup);
+    rangerEnvConfig.setProperties(targetRangerEnvConfig);
+    rangerEnvConfig.save();
+
+    outputMsg = outputMsg + MessageFormat.format("Successfully updated {0} config type.\n", RANGER_ENV_CONFIG_TYPE);
+
+    return createCommandReport(0, HostRoleStatus.COMPLETED, "{}", outputMsg, "");
+  }
+}
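
The ranger-env update reduces to a single rule; a minimal sketch:

  static String nestedGroupSyncEnabled(String ldapGroupHierarchyLevels) {
    // nested group sync is enabled whenever the configured hierarchy depth is non-zero
    return "0".equals(ldapGroupHierarchyLevels) ? "false" : "true";
  }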

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/main/java/org/apache/ambari/server/stack/RepoUtil.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/stack/RepoUtil.java b/ambari-server/src/main/java/org/apache/ambari/server/stack/RepoUtil.java
index d43bdfa..073fd82 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/stack/RepoUtil.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/stack/RepoUtil.java
@@ -189,6 +189,8 @@ public class RepoUtil {
     re.setBaseUrl(repoInfo.getBaseUrl());
     re.setName(repoInfo.getRepoName());
     re.setRepositoryId(repoInfo.getRepoId());
+    re.setDistribution(repoInfo.getDistribution());
+    re.setComponents(repoInfo.getComponents());
     return re;
   }
 
@@ -209,4 +211,4 @@ class RepositoryFolderAndXml {
       this.repoDir = repoDir;
       this.repoXml = repoXml;
     }
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/main/java/org/apache/ambari/server/stack/StackContext.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/stack/StackContext.java b/ambari-server/src/main/java/org/apache/ambari/server/stack/StackContext.java
index db9d178..2992027 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/stack/StackContext.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/stack/StackContext.java
@@ -18,18 +18,28 @@
 
 package org.apache.ambari.server.stack;
 
-import java.io.File;
+import java.net.URI;
 import java.util.ArrayList;
 import java.util.Collection;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
 import java.util.concurrent.Future;
 import java.util.concurrent.ThreadFactory;
+import java.util.concurrent.TimeUnit;
 
 import org.apache.ambari.server.metadata.ActionMetadata;
 import org.apache.ambari.server.orm.dao.MetainfoDAO;
-import org.apache.ambari.server.state.stack.LatestRepoCallable;
 import org.apache.ambari.server.state.stack.OsFamily;
+import org.apache.ambari.server.state.stack.RepoUrlInfoCallable;
+import org.apache.ambari.server.state.stack.RepoUrlInfoCallable.RepoUrlInfoResult;
+import org.apache.ambari.server.state.stack.RepoVdfCallable;
+import org.apache.commons.collections.MapUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Provides external functionality to the Stack framework.
@@ -46,15 +56,12 @@ public class StackContext {
   private ActionMetadata actionMetaData;
 
   /**
-   * Operating System families
-   */
-  private OsFamily osFamily;
-
-  /**
    * Executor used to get latest repo url's
    */
-  private LatestRepoQueryExecutor repoUpdateExecutor = new LatestRepoQueryExecutor();
+  private LatestRepoQueryExecutor repoUpdateExecutor;
 
+  private static final Logger LOG = LoggerFactory.getLogger(StackContext.class);
+  private static final int THREAD_COUNT = 10;
 
 
   /**
@@ -67,7 +74,7 @@ public class StackContext {
   public StackContext(MetainfoDAO metaInfoDAO, ActionMetadata actionMetaData, OsFamily osFamily) {
     this.metaInfoDAO = metaInfoDAO;
     this.actionMetaData = actionMetaData;
-    this.osFamily = osFamily;
+    repoUpdateExecutor = new LatestRepoQueryExecutor(osFamily);
   }
 
   /**
@@ -85,9 +92,8 @@ public class StackContext {
    * @param url    external repo information URL
    * @param stack  stack module
    */
-  public void registerRepoUpdateTask(String url, StackModule stack) {
-    repoUpdateExecutor.addTask(new LatestRepoCallable(url,
-        new File(stack.getStackDirectory().getRepoDir()), stack.getModuleInfo(), osFamily));
+  public void registerRepoUpdateTask(URI uri, StackModule stack) {
+    repoUpdateExecutor.addTask(uri, stack);
   }
 
   /**
@@ -115,16 +121,16 @@ public class StackContext {
     /**
      * Registered tasks
      */
-    private Collection<LatestRepoCallable> tasks = new ArrayList<>();
+    private Map<URI, RepoUrlInfoCallable> tasks = new HashMap<>();
 
     /**
      * Task futures
      */
-    Collection<Future<Void>> futures = new ArrayList<>();
+    Collection<Future<?>> futures = new ArrayList<>();
     /**
      * Underlying executor
      */
-    private ExecutorService executor = Executors.newSingleThreadExecutor(new ThreadFactory() {
+    private ExecutorService executor = Executors.newFixedThreadPool(THREAD_COUNT, new ThreadFactory() {
       @Override
       public Thread newThread(Runnable r) {
         return new Thread(r, "Stack Version Loading Thread");
@@ -132,23 +138,89 @@ public class StackContext {
     });
 
 
+    private OsFamily m_family;
+
+    private LatestRepoQueryExecutor(OsFamily family) {
+      m_family = family;
+    }
+
     /**
-     * Add a task.
-     *
-     * @param task task to be added
+     * @param uri
+     *          uri to load
+     * @param stackModule
+     *          the stack module
      */
-    public void addTask(LatestRepoCallable task) {
-      tasks.add(task);
+    public void addTask(URI uri, StackModule stackModule) {
+      RepoUrlInfoCallable callable = null;
+      if (tasks.containsKey(uri)) {
+        callable = tasks.get(uri);
+      } else {
+        callable = new RepoUrlInfoCallable(uri);
+        tasks.put(uri, callable);
+      }
+
+      callable.addStack(stackModule);
     }
 
     /**
      * Execute all tasks.
      */
     public void execute() {
-      for (LatestRepoCallable task : tasks) {
-        futures.add(executor.submit(task));
+
+      long currentTime = System.nanoTime();
+      List<Future<Map<StackModule, RepoUrlInfoResult>>> results = new ArrayList<>();
+
+      // !!! first, load the *_urlinfo.json files and block for completion
+      try {
+        results = executor.invokeAll(tasks.values(), 2, TimeUnit.MINUTES);
+      } catch (InterruptedException e) {
+        LOG.warn("Could not load urlinfo as the executor was interrupted", e);
+        return;
+      } finally {
+        LOG.info("Loaded urlinfo in " + TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - currentTime) + "ms");
       }
+
+      List<Map<StackModule, RepoUrlInfoResult>> urlInfoResults = new ArrayList<>();
+      // !!! now load all the VDF _by version_ in a new thread.
+      for (Future<Map<StackModule, RepoUrlInfoResult>> future : results) {
+        try {
+          urlInfoResults.add(future.get());
+        } catch (Exception e) {
+          LOG.error("Could not load repo results", e.getCause());
+        }
+      }
+
+      currentTime = System.nanoTime();
+      for (Map<StackModule, RepoUrlInfoResult> urlInfoResult : urlInfoResults) {
+        for (Entry<StackModule, RepoUrlInfoResult> entry : urlInfoResult.entrySet()) {
+          StackModule stackModule = entry.getKey();
+          RepoUrlInfoResult result = entry.getValue();
+
+          if (null != result) {
+            if (MapUtils.isNotEmpty(result.getManifest())) {
+              for (Entry<String, Map<String, URI>> manifestEntry : result.getManifest().entrySet()) {
+                futures.add(executor.submit(new RepoVdfCallable(stackModule, manifestEntry.getKey(),
+                    manifestEntry.getValue(), m_family)));
+              }
+            }
+
+            if (MapUtils.isNotEmpty(result.getLatestVdf())) {
+             futures.add(executor.submit(
+                 new RepoVdfCallable(stackModule, result.getLatestVdf(), m_family)));
+            }
+          }
+        }
+      }
+
       executor.shutdown();
+
+      try {
+        executor.awaitTermination(2, TimeUnit.MINUTES);
+      } catch (InterruptedException e) {
+        LOG.warn("Loading all VDF was interrupted", e);
+      } finally {
+        LOG.info("Loaded all VDF in " + TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - currentTime) + "ms");
+      }
     }
 
     /**
@@ -157,7 +229,7 @@ public class StackContext {
      * @return true if all tasks have completed; false otherwise
      */
     public boolean hasCompleted() {
-      for (Future<Void> f : futures) {
+      for (Future<?> f : futures) {
         if (! f.isDone()) {
           return false;
         }
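
The executor rework above follows a two-phase pattern: block on the first wave (the urlinfo downloads) with invokeAll(), then fan the per-version VDF loads out from its results. A generic, self-contained sketch with placeholder tasks (not the Ambari classes themselves):

  import java.util.Arrays;
  import java.util.List;
  import java.util.concurrent.Callable;
  import java.util.concurrent.ExecutorService;
  import java.util.concurrent.Executors;
  import java.util.concurrent.Future;
  import java.util.concurrent.TimeUnit;

  class TwoPhaseSketch {
    public static void main(String[] args) throws Exception {
      ExecutorService pool = Executors.newFixedThreadPool(10);
      List<Callable<String>> firstWave =
          Arrays.<Callable<String>>asList(() -> "vdf-a", () -> "vdf-b"); // placeholder tasks
      // phase 1: bounded wait for the whole first wave
      List<Future<String>> results = pool.invokeAll(firstWave, 2, TimeUnit.MINUTES);
      // phase 2: fan out follow-up work per result without blocking between submissions
      for (Future<String> f : results) {
        String vdf = f.get(); // throws if the first-wave task failed or was cancelled
        pool.submit(() -> System.out.println("loading " + vdf));
      }
      pool.shutdown();
      pool.awaitTermination(2, TimeUnit.MINUTES);
    }
  }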


[5/7] ambari git commit: AMBARI-22190. After merging trunk to branch-3.0-perf some parts of code are missing. (mpapirkovskyy)

Posted by mp...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/main/java/org/apache/ambari/server/stack/StackModule.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/stack/StackModule.java b/ambari-server/src/main/java/org/apache/ambari/server/stack/StackModule.java
index 3688727..6dc2b93 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/stack/StackModule.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/stack/StackModule.java
@@ -19,6 +19,8 @@
 package org.apache.ambari.server.stack;
 
 import java.io.File;
+import java.net.URI;
+import java.net.URISyntaxException;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Collections;
@@ -1143,10 +1145,52 @@ public class StackModule extends BaseModule<StackModule, StackInfo> implements V
     stackInfo.getRepositories().addAll(serviceRepos);
 
     if (null != rxml && null != rxml.getLatestURI() && stackRepos.size() > 0) {
-      stackContext.registerRepoUpdateTask(rxml.getLatestURI(), this);
+      registerRepoUpdateTask(rxml);
     }
   }
 
+  private void registerRepoUpdateTask(RepositoryXml rxml) {
+    String latest = rxml.getLatestURI();
+    if (StringUtils.isBlank(latest)) {
+      return;
+    }
+
+    URI uri = getURI(this, latest);
+
+    if (null == uri) {
+      LOG.warn("Could not determine how to load stack {}-{} latest definition for {}",
+          stackInfo.getName(), stackInfo.getVersion(), latest);
+      return;
+    }
+
+    stackContext.registerRepoUpdateTask(uri, this);
+  }
+
+  /**
+   * @param module
+   *          the stack module
+   * @param uriString
+   *          the uri string
+   * @return a repo URI, which may be file-based when the string is not an http(s) URL
+   */
+  public static URI getURI(StackModule module, String uriString) {
+
+    URI uri = null;
+    if (uriString.startsWith("http")) {
+      try {
+        uri = new URI(uriString);
+      } catch (URISyntaxException e) {
+        // malformed URI; leave null so the caller can log a warning
+      }
+    } else if ('.' == uriString.charAt(0)) {
+      uri = new File(module.getStackDirectory().getRepoDir(), uriString).toURI();
+    } else {
+      uri = new File(uriString).toURI();
+    }
+
+    return uri;
+  }
+
   /**
    * Gets the service repos with duplicates filtered out. A service repo is considered duplicate if:
    * <ul>
@@ -1233,7 +1277,7 @@ public class StackModule extends BaseModule<StackModule, StackInfo> implements V
         if (null != serviceRepoXml) {
           repos.addAll(serviceRepoXml.getRepositories());
           if (null != serviceRepoXml.getLatestURI()) {
-            stackContext.registerRepoUpdateTask(serviceRepoXml.getLatestURI(), this);
+            registerRepoUpdateTask(serviceRepoXml);
           }
         }
       }
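
The three accepted forms resolve as follows (paths are illustrative; module is an existing StackModule):

  URI remote = StackModule.getURI(module, "http://host/hdp/latest.json");        // parsed as-is
  URI inRepo = StackModule.getURI(module, "./latest.json");                      // relative to the stack repo dir
  URI onDisk = StackModule.getURI(module, "/var/lib/ambari-server/latest.json"); // plain file path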

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/main/java/org/apache/ambari/server/stack/StackServiceDirectory.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/stack/StackServiceDirectory.java b/ambari-server/src/main/java/org/apache/ambari/server/stack/StackServiceDirectory.java
index 4b79a71..477ee66 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/stack/StackServiceDirectory.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/stack/StackServiceDirectory.java
@@ -26,6 +26,7 @@ import javax.annotation.Nullable;
 
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.state.stack.RepositoryXml;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -100,7 +101,11 @@ public class StackServiceDirectory extends ServiceDirectory {
     String stackName = stackDir.getName();
     String versionString = stackVersionDir.getName().replaceAll("\\.", "");
 
-    return stackName + versionString + serviceName + "ServiceAdvisor";
+    // Remove characters that are illegal in Python class names from the advisor name
+    String advisorClassName = stackName + versionString + serviceName + "ServiceAdvisor";
+    advisorClassName = advisorClassName.replaceAll("[^a-zA-Z0-9]+", "");
+
+    return advisorClassName;
   }
 
   /**
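
For example (hypothetical stack and service names), the sanitization yields a legal Python identifier:

  // ("HDP" + "26" + "MY-SERVICE" + "ServiceAdvisor") -> "HDP26MYSERVICEServiceAdvisor"
  String advisorClassName = ("HDP" + "26" + "MY-SERVICE" + "ServiceAdvisor").replaceAll("[^a-zA-Z0-9]+", "");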

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/main/java/org/apache/ambari/server/state/RepositoryInfo.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/RepositoryInfo.java b/ambari-server/src/main/java/org/apache/ambari/server/state/RepositoryInfo.java
index d57b5d6..8ab1fe9 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/RepositoryInfo.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/RepositoryInfo.java
@@ -29,6 +29,8 @@ public class RepositoryInfo {
   private String osType;
   private String repoId;
   private String repoName;
+  private String distribution;
+  private String components;
   private String mirrorsList;
   private String defaultBaseUrl;
   private boolean repoSaved = false;
@@ -91,6 +93,22 @@ public class RepositoryInfo {
     this.repoName = repoName;
   }
 
+  public String getDistribution() {
+    return distribution;
+  }
+
+  public void setDistribution(String distribution) {
+    this.distribution = distribution;
+  }
+
+  public String getComponents() {
+    return components;
+  }
+
+  public void setComponents(String components) {
+    this.components = components;
+  }
+
   /**
    * @return the mirrorsList
    */
@@ -154,6 +172,8 @@ public class RepositoryInfo {
         + ", repoId=" + repoId
         + ", baseUrl=" + baseUrl
         + ", repoName=" + repoName
+        + ", distribution=" + distribution
+        + ", components=" + components
         + ", mirrorsList=" + mirrorsList
         + ", unique=" + unique
         + ", ambariManagedRepositories=" + ambariManagedRepositories
@@ -171,6 +191,8 @@ public class RepositoryInfo {
         Objects.equal(osType, that.osType) &&
         Objects.equal(repoId, that.repoId) &&
         Objects.equal(repoName, that.repoName) &&
+        Objects.equal(distribution, that.distribution) &&
+        Objects.equal(components, that.components) &&
         Objects.equal(mirrorsList, that.mirrorsList) &&
         Objects.equal(defaultBaseUrl, that.defaultBaseUrl) &&
         Objects.equal(ambariManagedRepositories, that.ambariManagedRepositories);
@@ -178,13 +200,14 @@ public class RepositoryInfo {
 
   @Override
   public int hashCode() {
-    return Objects.hashCode(baseUrl, osType, repoId, repoName, mirrorsList, defaultBaseUrl, repoSaved, unique, ambariManagedRepositories);
+    return Objects.hashCode(baseUrl, osType, repoId, repoName, distribution, components, mirrorsList, defaultBaseUrl,
+           ambariManagedRepositories);
   }
 
   public RepositoryResponse convertToResponse()
   {
     return new RepositoryResponse(getBaseUrl(), getOsType(), getRepoId(),
-        getRepoName(), getMirrorsList(), getDefaultBaseUrl());
+            getRepoName(), getDistribution(), getComponents(), getMirrorsList(), getDefaultBaseUrl());
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceComponentImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceComponentImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceComponentImpl.java
index aecd55d..7efccd0 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceComponentImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceComponentImpl.java
@@ -34,6 +34,7 @@ import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.ObjectNotFoundException;
 import org.apache.ambari.server.ServiceComponentHostNotFoundException;
 import org.apache.ambari.server.api.services.AmbariMetaInfo;
+import org.apache.ambari.server.controller.MaintenanceStateHelper;
 import org.apache.ambari.server.controller.ServiceComponentResponse;
 import org.apache.ambari.server.controller.internal.DeleteHostComponentStatusMetaData;
 import org.apache.ambari.server.events.ServiceComponentRecoveryChangedEvent;
@@ -101,6 +102,9 @@ public class ServiceComponentImpl implements ServiceComponent {
   @Inject
   private HostComponentStateDAO hostComponentDAO;
 
+  @Inject
+  private MaintenanceStateHelper maintenanceStateHelper;
+
   @AssistedInject
   public ServiceComponentImpl(@Assisted Service service, @Assisted String componentName,
       AmbariMetaInfo ambariMetaInfo,
@@ -748,10 +752,31 @@ public class ServiceComponentImpl implements ServiceComponent {
     return count;
   }
 
+  /**
+   * Counts the ServiceComponentHosts that are in the given state and are effectively not in maintenance mode.
+   * @param state the state to match
+   * @return the count of matching hosts whose effective maintenance state is {@link MaintenanceState#OFF}
+   */
+  private int getMaintenanceOffSCHCountByState(State state) {
+    int count = 0;
+    for (ServiceComponentHost sch : hostComponents.values()) {
+      try {
+        MaintenanceState effectiveMaintenanceState = maintenanceStateHelper.getEffectiveState(sch, sch.getHost());
+        if (sch.getState() == state && effectiveMaintenanceState == MaintenanceState.OFF) {
+          count++;
+        }
+      } catch (AmbariException e) {
+        e.printStackTrace();
+      }
+    }
+    return count;
+  }
+
   private Map <String, Integer> getServiceComponentStateCount() {
     Map <String, Integer> serviceComponentStateCountMap = new HashMap<>();
     serviceComponentStateCountMap.put("startedCount", getSCHCountByState(State.STARTED));
     serviceComponentStateCountMap.put("installedCount", getSCHCountByState(State.INSTALLED));
+    serviceComponentStateCountMap.put("installedAndMaintenanceOffCount", getMaintenanceOffSCHCountByState(State.INSTALLED));
     serviceComponentStateCountMap.put("installFailedCount", getSCHCountByState(State.INSTALL_FAILED));
     serviceComponentStateCountMap.put("initCount", getSCHCountByState(State.INIT));
     serviceComponentStateCountMap.put("unknownCount", getSCHCountByState(State.UNKNOWN));

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceOsSpecific.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceOsSpecific.java b/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceOsSpecific.java
index 50443dc..f692eb5 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceOsSpecific.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceOsSpecific.java
@@ -105,6 +105,10 @@ public class ServiceOsSpecific {
     private String repoid;
     @SerializedName("repoName")
     private String reponame;
+    @SerializedName("distribution")
+    private String distribution;
+    @SerializedName("components")
+    private String components;
 
     private Repo() {
     }
@@ -137,6 +141,14 @@ public class ServiceOsSpecific {
       return reponame;
     }
 
+    public String getDistribution() {
+      return distribution;
+    }
+
+    public String getComponents() {
+      return components;
+    }
+
     @Override
     public boolean equals(Object o) {
       if (this == o) return true;
@@ -148,6 +160,8 @@ public class ServiceOsSpecific {
       if (mirrorslist != null ? !mirrorslist.equals(repo.mirrorslist) : repo.mirrorslist != null) return false;
       if (repoid != null ? !repoid.equals(repo.repoid) : repo.repoid != null) return false;
       if (reponame != null ? !reponame.equals(repo.reponame) : repo.reponame != null) return false;
+      if (distribution != null ? !distribution.equals(repo.distribution) : repo.distribution != null) return false;
+      if (components != null ? !components.equals(repo.components) : repo.components != null) return false;
 
       return true;
     }
@@ -158,6 +172,8 @@ public class ServiceOsSpecific {
       result = 31 * result + (mirrorslist != null ? mirrorslist.hashCode() : 0);
       result = 31 * result + (repoid != null ? repoid.hashCode() : 0);
       result = 31 * result + (reponame != null ? reponame.hashCode() : 0);
+      result = 31 * result + (distribution != null ? distribution.hashCode() : 0);
+      result = 31 * result + (components != null ? components.hashCode() : 0);
       return result;
     }
   }

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/main/java/org/apache/ambari/server/state/StackInfo.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/StackInfo.java b/ambari-server/src/main/java/org/apache/ambari/server/state/StackInfo.java
index 6184b94..a3886ab 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/StackInfo.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/StackInfo.java
@@ -33,7 +33,6 @@ import org.apache.ambari.server.controller.StackVersionResponse;
 import org.apache.ambari.server.stack.Validable;
 import org.apache.ambari.server.state.repository.VersionDefinitionXml;
 import org.apache.ambari.server.state.stack.ConfigUpgradePack;
-import org.apache.ambari.server.state.stack.LatestRepoCallable;
 import org.apache.ambari.server.state.stack.RepositoryXml;
 import org.apache.ambari.server.state.stack.StackRoleCommandOrder;
 import org.apache.ambari.server.state.stack.UpgradePack;

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/main/java/org/apache/ambari/server/state/UpgradeHelper.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/UpgradeHelper.java b/ambari-server/src/main/java/org/apache/ambari/server/state/UpgradeHelper.java
index 464cb41..8f9d8e1 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/UpgradeHelper.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/UpgradeHelper.java
@@ -563,15 +563,29 @@ public class UpgradeHelper {
     LinkedHashSet<StageWrapper> priority = new LinkedHashSet<>();
     LinkedHashSet<StageWrapper> others = new LinkedHashSet<>();
 
+    Set<String> extraKeys = new HashSet<>();
+    LinkedHashSet<StageWrapper> extras = new LinkedHashSet<>();
+
     for (List<StageWrapper> holderItems : new List[] { oldHolder.items, newHolder.items }) {
       for (StageWrapper stageWrapper : holderItems) {
-        ServiceCheckStageWrapper wrapper = (ServiceCheckStageWrapper) stageWrapper;
-
-        if (wrapper.priority) {
-          priority.add(stageWrapper);
+        if (stageWrapper instanceof ServiceCheckStageWrapper) {
+          ServiceCheckStageWrapper wrapper = (ServiceCheckStageWrapper) stageWrapper;
+          if (wrapper.priority) {
+            priority.add(stageWrapper);
+          } else {
+            others.add(stageWrapper);
+          }
         } else {
-          others.add(stageWrapper);
+          // !!! there is a good chance that back-to-back service check groups are adding the
+          // same non-service-check wrappers; the toString() key below should be
+          // "equal enough" to prevent them from duplicating on merge
+          String key = stageWrapper.toString();
+          if (!extraKeys.contains(key)) {
+            extras.add(stageWrapper);
+            extraKeys.add(key);
+          }
         }
+
       }
     }
 
@@ -580,6 +594,7 @@ public class UpgradeHelper {
 
     oldHolder.items = Lists.newLinkedList(priority);
     oldHolder.items.addAll(others);
+    oldHolder.items.addAll(extras);
   }
 
   /**
@@ -911,7 +926,8 @@ public class UpgradeHelper {
    * stack and the target stack. If a value has changed between stacks, then the
    * target stack value should be taken unless the cluster's value differs from
    * the old stack. This can occur if a property has been customized after
-   * installation.</li>
+   * installation. Read-only properties, however, are always taken from the new
+   * stack.</li>
    * <li>Downgrade: Reset the latest configurations from the service's original
    * stack. The new configurations that were created on upgrade must be left
    * intact until all components have been reverted, otherwise heartbeats will
@@ -961,6 +977,11 @@ public class UpgradeHelper {
         continue;
       }
 
+      // the auto-merge must take read-only properties even if they have changed
+      // - if a property was read-only in the source stack, then we must
+      // take the new stack's value
+      Map<String, Set<String>> readOnlyProperties = getReadOnlyProperties(sourceStackId, serviceName);
+
       // upgrade is a bit harder - we have to merge new stack configurations in
 
       // populate a map of default configurations for the service on the old
@@ -1026,8 +1047,7 @@ public class UpgradeHelper {
         Map<String, String> existingConfigurations = existingServiceConfig.getProperties();
 
         // get the new configurations
-        Map<String, String> newDefaultConfigurations = newServiceDefaultConfigsByType.get(
-            configurationType);
+        Map<String, String> newDefaultConfigurations = newServiceDefaultConfigsByType.get(configurationType);
 
         // if the new stack configurations don't have the type, then simply add
         // all of the existing in
@@ -1046,8 +1066,7 @@ public class UpgradeHelper {
           }
         }
 
-        // process every existing configuration property for this configuration
-        // type
+        // process every existing configuration property for this configuration type
         for (Map.Entry<String, String> existingConfigurationEntry : existingConfigurations.entrySet()) {
           String existingConfigurationKey = existingConfigurationEntry.getKey();
           String existingConfigurationValue = existingConfigurationEntry.getValue();
@@ -1064,17 +1083,22 @@ public class UpgradeHelper {
               // from the original stack
               String oldDefaultValue = oldServiceDefaultConfigs.get(existingConfigurationKey);
 
-              if (!StringUtils.equals(existingConfigurationValue, oldDefaultValue)) {
-                // at this point, we've determined that there is a
-                // difference
-                // between default values between stacks, but the value was
-                // also customized, so keep the customized value
+              // see if this property is read-only; if so, it does not matter
+              // whether it was customized - we should take the new
+              // stack's value
+              Set<String> readOnlyPropertiesForType = readOnlyProperties.get(configurationType);
+              boolean readOnly = (null != readOnlyPropertiesForType
+                  && readOnlyPropertiesForType.contains(existingConfigurationKey));
+
+              if (!readOnly && !StringUtils.equals(existingConfigurationValue, oldDefaultValue)) {
+                // at this point, we've determined that there is a difference
+                // between default values between stacks, but the value was also
+                // customized, so keep the customized value
                 newDefaultConfigurations.put(existingConfigurationKey, existingConfigurationValue);
               }
             }
           } else {
-            // there is no entry in the map, so add the existing key/value
-            // pair
+            // there is no entry in the map, so add the existing key/value pair
             newDefaultConfigurations.put(existingConfigurationKey, existingConfigurationValue);
           }
         }
@@ -1128,4 +1152,55 @@ public class UpgradeHelper {
       }
     }
   }
+
+  /**
+   * Gets all of the read-only properties for the given service. This will also
+   * include any stack properties as well which are read-only.
+   *
+   * @param stackId
+   *          the stack to get read-only properties for (not {@code null}).
+   * @param serviceName
+   *          the name of the service (not {@code null}).
+   * @return a map of configuration type to set of property names which are
+   *         read-only
+   * @throws AmbariException
+   */
+  private Map<String, Set<String>> getReadOnlyProperties(StackId stackId, String serviceName)
+      throws AmbariException {
+    Map<String, Set<String>> readOnlyProperties = new HashMap<>();
+
+    Set<PropertyInfo> properties = new HashSet<>();
+
+    Set<PropertyInfo> stackProperties = m_ambariMetaInfoProvider.get().getStackProperties(
+        stackId.getStackName(), stackId.getStackVersion());
+
+    Set<PropertyInfo> serviceProperties = m_ambariMetaInfoProvider.get().getServiceProperties(
+        stackId.getStackName(), stackId.getStackVersion(), serviceName);
+
+    if (CollectionUtils.isNotEmpty(stackProperties)) {
+      properties.addAll(stackProperties);
+    }
+
+    if (CollectionUtils.isNotEmpty(serviceProperties)) {
+      properties.addAll(serviceProperties);
+    }
+
+    for (PropertyInfo property : properties) {
+      ValueAttributesInfo valueAttributes = property.getPropertyValueAttributes();
+      if (null != valueAttributes && valueAttributes.getReadOnly() == Boolean.TRUE) {
+        String type = ConfigHelper.fileNameToConfigType(property.getFilename());
+
+        // get the set of properties for this type, initializing it if needed
+        Set<String> readOnlyPropertiesForType = readOnlyProperties.get(type);
+        if (null == readOnlyPropertiesForType) {
+          readOnlyPropertiesForType = new HashSet<>();
+          readOnlyProperties.put(type, readOnlyPropertiesForType);
+        }
+
+        readOnlyPropertiesForType.add(property.getName());
+      }
+    }
+
+    return readOnlyProperties;
+  }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java
index 48d3f5b..bbbb613 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java
@@ -1974,6 +1974,8 @@ public class ClusterImpl implements Cluster {
       }
     }
 
+    clusterEntity = clusterDAO.merge(clusterEntity);
+
     if (serviceName == null) {
       ArrayList<String> configTypes = new ArrayList<>();
       for (Config config: configs) {
@@ -2482,7 +2484,7 @@ public class ClusterImpl implements Cluster {
       // since the entities which were modified came from the cluster entity's
       // list to begin with, we can just save them right back - no need for a
       // new collection since the entity instances were modified directly
-      clusterEntity = clusterDAO.merge(clusterEntity);
+      clusterEntity = clusterDAO.merge(clusterEntity, true);
 
       cacheConfigurations();
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/AbstractKerberosDescriptor.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/AbstractKerberosDescriptor.java b/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/AbstractKerberosDescriptor.java
index 0d1da9c..b496942 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/AbstractKerberosDescriptor.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/AbstractKerberosDescriptor.java
@@ -34,6 +34,8 @@ import org.apache.commons.lang.StringUtils;
  */
 public abstract class AbstractKerberosDescriptor {
 
+  static final String KEY_NAME = "name";
+
   /**
    * An AbstractKerberosDescriptor serving as the parent (or container) for this
    * AbstractKerberosDescriptor.
@@ -74,7 +76,7 @@ public abstract class AbstractKerberosDescriptor {
     String name = getName();
 
     if (name != null) {
-      dataMap.put("name", name);
+      dataMap.put(KEY_NAME, name);
     }
 
     return dataMap;

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/AbstractKerberosDescriptorContainer.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/AbstractKerberosDescriptorContainer.java b/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/AbstractKerberosDescriptorContainer.java
index 5658133..4255dd1 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/AbstractKerberosDescriptorContainer.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/AbstractKerberosDescriptorContainer.java
@@ -96,6 +96,10 @@ import com.google.common.collect.Sets;
 public abstract class AbstractKerberosDescriptorContainer extends AbstractKerberosDescriptor {
   private static final Logger LOG = LoggerFactory.getLogger(AbstractKerberosDescriptorContainer.class);
 
+  static final String KEY_IDENTITIES = Type.IDENTITY.getDescriptorPluralName();
+  static final String KEY_CONFIGURATIONS = Type.CONFIGURATION.getDescriptorPluralName();
+  static final String KEY_AUTH_TO_LOCAL_PROPERTIES = Type.AUTH_TO_LOCAL_PROPERTY.getDescriptorPluralName();
+
   /**
    * Regular expression pattern used to parse auth_to_local property specifications into the following
    * parts:
@@ -135,7 +139,7 @@ public abstract class AbstractKerberosDescriptorContainer extends AbstractKerber
       Object list;
 
       // (Safely) Get the set of KerberosIdentityDescriptors
-      list = data.get(Type.IDENTITY.getDescriptorPluralName());
+      list = data.get(KEY_IDENTITIES);
       if (list instanceof Collection) {
         for (Object item : (Collection) list) {
           if (item instanceof Map) {
@@ -145,7 +149,7 @@ public abstract class AbstractKerberosDescriptorContainer extends AbstractKerber
       }
 
       // (Safely) Get the set of KerberosConfigurationDescriptors
-      list = data.get(Type.CONFIGURATION.getDescriptorPluralName());
+      list = data.get(KEY_CONFIGURATIONS);
       if (list instanceof Collection) {
         for (Object item : (Collection) list) {
           if (item instanceof Map) {
@@ -155,7 +159,7 @@ public abstract class AbstractKerberosDescriptorContainer extends AbstractKerber
       }
 
      // (Safely) Get the set of auth_to_local property names
-      list = data.get(Type.AUTH_TO_LOCAL_PROPERTY.getDescriptorPluralName());
+      list = data.get(KEY_AUTH_TO_LOCAL_PROPERTIES);
       if (list instanceof Collection) {
         for (Object item : (Collection) list) {
           if (item instanceof String) {
@@ -636,7 +640,7 @@ public abstract class AbstractKerberosDescriptorContainer extends AbstractKerber
    * @param path a String declaring the path to a KerberosIdentityDescriptor
    * @return a KerberosIdentityDescriptor identified by the path or null if not found
    */
-  protected KerberosIdentityDescriptor getReferencedIdentityDescriptor(String path)
+  public KerberosIdentityDescriptor getReferencedIdentityDescriptor(String path)
       throws AmbariException {
     KerberosIdentityDescriptor identityDescriptor = null;
 
@@ -759,7 +763,7 @@ public abstract class AbstractKerberosDescriptorContainer extends AbstractKerber
       for (KerberosIdentityDescriptor identity : identities) {
         list.put(identity.getName(), identity.toMap());
       }
-      map.put(Type.IDENTITY.getDescriptorPluralName(), list.values());
+      map.put(KEY_IDENTITIES, list.values());
     }
 
     if (configurations != null) {
@@ -769,11 +773,11 @@ public abstract class AbstractKerberosDescriptorContainer extends AbstractKerber
       for (KerberosConfigurationDescriptor configuration : configurations.values()) {
         list.put(configuration.getType(), configuration.toMap());
       }
-      map.put(Type.CONFIGURATION.getDescriptorPluralName(), list.values());
+      map.put(KEY_CONFIGURATIONS, list.values());
     }
 
     if (authToLocalProperties != null) {
-      map.put(Type.AUTH_TO_LOCAL_PROPERTY.getDescriptorPluralName(), authToLocalProperties);
+      map.put(KEY_AUTH_TO_LOCAL_PROPERTIES, authToLocalProperties);
     }
 
     return map;
@@ -851,12 +855,7 @@ public abstract class AbstractKerberosDescriptorContainer extends AbstractKerber
     if (identity != null) {
       KerberosIdentityDescriptor referencedIdentity;
       try {
-        if (identity.getReference() != null) {
-          referencedIdentity = getReferencedIdentityDescriptor(identity.getReference());
-        } else {
-          // For backwards compatibility, see if the identity's name indicates a reference...
-          referencedIdentity = getReferencedIdentityDescriptor(identity.getName());
-        }
+        referencedIdentity = getReferencedIdentityDescriptor(identity.getReference());
       } catch (AmbariException e) {
         throw new AmbariException(String.format("Invalid Kerberos identity reference: %s", identity.getReference()), e);
       }

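The KEY_* refactoring above keeps the defensive parsing style used throughout
the descriptor classes: look the key up in an untyped Map and act only when
the value has the expected shape. A standalone sketch of that pattern, with an
illustrative key name:

    import java.util.Collection;
    import java.util.Map;

    public class SafeParseSketch {
      static final String KEY_IDENTITIES = "identities"; // illustrative key

      /** Counts identity entries, silently skipping anything of the wrong shape. */
      public static int countIdentities(Map<?, ?> data) {
        int count = 0;
        Object list = data.get(KEY_IDENTITIES);
        if (list instanceof Collection) {
          for (Object item : (Collection<?>) list) {
            if (item instanceof Map) {
              count++;
            }
          }
        }
        return count;
      }
    }
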
http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/KerberosComponentDescriptor.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/KerberosComponentDescriptor.java b/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/KerberosComponentDescriptor.java
index 768a17e..3bf1dad 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/KerberosComponentDescriptor.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/KerberosComponentDescriptor.java
@@ -20,7 +20,7 @@ package org.apache.ambari.server.state.kerberos;
 import java.util.Collection;
 import java.util.Map;
 
-/**
+/*
  * KerberosComponentDescriptor implements AbstractKerberosDescriptorContainer. It contains the data
  * related to a component which include the following properties:
  * <ul>

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/KerberosDescriptor.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/KerberosDescriptor.java b/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/KerberosDescriptor.java
index 9432f6c..0c7a9a9 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/KerberosDescriptor.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/KerberosDescriptor.java
@@ -90,6 +90,9 @@ import org.apache.commons.lang.StringUtils;
  */
 public class KerberosDescriptor extends AbstractKerberosDescriptorContainer {
 
+  static final String KEY_PROPERTIES = "properties";
+  static final String KEY_SERVICES = Type.SERVICE.getDescriptorPluralName();
+
   /**
    * A Map of the "global" properties contained within this KerberosDescriptor
    */
@@ -121,7 +124,7 @@ public class KerberosDescriptor extends AbstractKerberosDescriptorContainer {
     super(data);
 
     if (data != null) {
-      Object list = data.get(Type.SERVICE.getDescriptorPluralName());
+      Object list = data.get(KEY_SERVICES);
       if (list instanceof Collection) {
         for (Object item : (Collection) list) {
           if (item instanceof Map) {
@@ -130,7 +133,7 @@ public class KerberosDescriptor extends AbstractKerberosDescriptorContainer {
         }
       }
 
-      Object map = data.get("properties");
+      Object map = data.get(KEY_PROPERTIES);
       if (map instanceof Map) {
         for (Map.Entry<?, ?> entry : ((Map<?, ?>) map).entrySet()) {
           Object value = entry.getValue();
@@ -198,7 +201,7 @@ public class KerberosDescriptor extends AbstractKerberosDescriptorContainer {
       }
 
       if (services == null) {
-        services = new TreeMap<>();
+        services = new TreeMap<String, KerberosServiceDescriptor>();
       }
 
       KerberosServiceDescriptor existing = services.get(name);
@@ -326,11 +329,11 @@ public class KerberosDescriptor extends AbstractKerberosDescriptorContainer {
       for (KerberosServiceDescriptor service : services.values()) {
         list.add(service.toMap());
       }
-      map.put(Type.SERVICE.getDescriptorPluralName(), list);
+      map.put(KEY_SERVICES, list);
     }
 
     if (properties != null) {
-      map.put("properties", new TreeMap<>(properties));
+      map.put(KEY_PROPERTIES, new TreeMap<>(properties));
     }
 
     return map;
@@ -453,7 +456,7 @@ public class KerberosDescriptor extends AbstractKerberosDescriptorContainer {
 
   private static void collectFromIdentities(String service, String component, Collection<KerberosIdentityDescriptor> identities, Map<String, String> result) {
     for (KerberosIdentityDescriptor each : identities) {
-      if (each.getPrincipalDescriptor() != null && !each.getReferencedServiceName().isPresent() && !each.getName().startsWith("/")) {
+      if (each.getPrincipalDescriptor() != null && !each.getReferencedServiceName().isPresent()) {
         String path = StringUtils.isBlank(component)
             ? String.format("%s/%s", service, each.getName())
             : String.format("%s/%s/%s", service, component, each.getName());

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/KerberosIdentityDescriptor.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/KerberosIdentityDescriptor.java b/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/KerberosIdentityDescriptor.java
index 911723b..ef45343 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/KerberosIdentityDescriptor.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/KerberosIdentityDescriptor.java
@@ -33,7 +33,6 @@ import com.google.common.base.Optional;
  * <li>name</li>
  * <li>principal</li>
  * <li>keytab</li>
- * <li>password</li>
  * </ul>
  * <p/>
  * The following (pseudo) JSON Schema will yield a valid KerberosIdentityDescriptor
@@ -58,11 +57,6 @@ import com.google.common.base.Optional;
  *          "type": "{@link org.apache.ambari.server.state.kerberos.KerberosKeytabDescriptor}",
  *          }
  *        }
- *        "password": {
- *          "description": "The password to use for this identity. If not set a secure random
- *                          password will automatically be generated",
- *          "type": "string"
- *        }
  *      }
  *   }
  * </pre>
@@ -73,6 +67,11 @@ import com.google.common.base.Optional;
  */
 public class KerberosIdentityDescriptor extends AbstractKerberosDescriptor {
 
+  static final String KEY_REFERENCE = "reference";
+  static final String KEY_PRINCIPAL = Type.PRINCIPAL.getDescriptorName();
+  static final String KEY_KEYTAB = Type.KEYTAB.getDescriptorName();
+  static final String KEY_WHEN = "when";
+
   /**
    * The path to the Kerberos Identity definitions this {@link KerberosIdentityDescriptor} references
    */
@@ -89,13 +88,6 @@ public class KerberosIdentityDescriptor extends AbstractKerberosDescriptor {
   private KerberosKeytabDescriptor keytab = null;
 
   /**
-   * A String containing the password for this Kerberos identity
-   * <p/>
-   * If this value is null or empty, a random password will be generated as necessary.
-   */
-  private String password = null;
-
-  /**
    * An expression used to determine when this {@link KerberosIdentityDescriptor} is relevant for the
    * cluster. If the process expression is not <code>null</code> and evaluates to <code>false</code>
    * then this {@link KerberosIdentityDescriptor} will be ignored when processing identities.
@@ -105,11 +97,11 @@ public class KerberosIdentityDescriptor extends AbstractKerberosDescriptor {
   /**
    * Creates a new KerberosIdentityDescriptor
    *
-   * @param name the name of this identity descriptor
+   * @param name      the name of this identity descriptor
    * @param reference an optional path to a referenced KerberosIdentityDescriptor
    * @param principal a KerberosPrincipalDescriptor
-   * @param keytab a KerberosKeytabDescriptor
-   * @param when a predicate
+   * @param keytab    a KerberosKeytabDescriptor
+   * @param when      a predicate
    */
   public KerberosIdentityDescriptor(String name, String reference, KerberosPrincipalDescriptor principal, KerberosKeytabDescriptor keytab, Predicate when) {
     setName(name);
@@ -133,24 +125,22 @@ public class KerberosIdentityDescriptor extends AbstractKerberosDescriptor {
     // This is not automatically set by the super classes.
     setName(getStringValue(data, "name"));
 
-    setReference(getStringValue(data, "reference"));
+    setReference(getStringValue(data, KEY_REFERENCE));
 
     if (data != null) {
       Object item;
 
-      setPassword(getStringValue(data, "password"));
-
-      item = data.get(Type.PRINCIPAL.getDescriptorName());
+      item = data.get(KEY_PRINCIPAL);
       if (item instanceof Map) {
         setPrincipalDescriptor(new KerberosPrincipalDescriptor((Map<?, ?>) item));
       }
 
-      item = data.get(Type.KEYTAB.getDescriptorName());
+      item = data.get(KEY_KEYTAB);
       if (item instanceof Map) {
         setKeytabDescriptor(new KerberosKeytabDescriptor((Map<?, ?>) item));
       }
 
-      item = data.get("when");
+      item = data.get(KEY_WHEN);
       if (item instanceof Map) {
         setWhen(PredicateUtils.fromMap((Map<?, ?>) item));
       }
@@ -221,27 +211,6 @@ public class KerberosIdentityDescriptor extends AbstractKerberosDescriptor {
   }
 
   /**
-   * Gets the password for this this KerberosIdentityDescriptor
-   *
-   * @return A String containing the password for this this KerberosIdentityDescriptor
-   * @see #password
-   */
-  public String getPassword() {
-    return password;
-  }
-
-  /**
-   * Sets the password for this this KerberosIdentityDescriptor
-   *
-   * @param password A String containing the password for this this KerberosIdentityDescriptor
-   * @see #password
-   */
-  public void setPassword(String password) {
-    this.password = password;
-  }
-
-
-  /**
    * Gets the expression (or {@link Predicate}) to use to determine when to include this Kerberos
    * identity while processing Kerberos identities.
    * <p>
@@ -295,8 +264,6 @@ public class KerberosIdentityDescriptor extends AbstractKerberosDescriptor {
 
       setReference(updates.getReference());
 
-      setPassword(updates.getPassword());
-
       KerberosPrincipalDescriptor existingPrincipal = getPrincipalDescriptor();
       if (existingPrincipal == null) {
         setPrincipalDescriptor(updates.getPrincipalDescriptor());
@@ -312,7 +279,7 @@ public class KerberosIdentityDescriptor extends AbstractKerberosDescriptor {
       }
 
       Predicate updatedWhen = updates.getWhen();
-      if(updatedWhen != null) {
+      if (updatedWhen != null) {
         setWhen(updatedWhen);
       }
     }
@@ -331,23 +298,19 @@ public class KerberosIdentityDescriptor extends AbstractKerberosDescriptor {
     Map<String, Object> dataMap = super.toMap();
 
     if (reference != null) {
-      dataMap.put("reference", reference);
+      dataMap.put(KEY_REFERENCE, reference);
     }
 
     if (principal != null) {
-      dataMap.put(Type.PRINCIPAL.getDescriptorName(), principal.toMap());
+      dataMap.put(KEY_PRINCIPAL, principal.toMap());
     }
 
     if (keytab != null) {
-      dataMap.put(Type.KEYTAB.getDescriptorName(), keytab.toMap());
-    }
-
-    if (password != null) {
-      dataMap.put("password", password);
+      dataMap.put(KEY_KEYTAB, keytab.toMap());
     }
 
-    if(when != null) {
-      dataMap.put("when", PredicateUtils.toMap(when));
+    if (when != null) {
+      dataMap.put(KEY_WHEN, PredicateUtils.toMap(when));
     }
 
     return dataMap;
@@ -435,11 +398,6 @@ public class KerberosIdentityDescriptor extends AbstractKerberosDescriptor {
                   : getKeytabDescriptor().equals(descriptor.getKeytabDescriptor())
           ) &&
           (
-              (getPassword() == null)
-                  ? (descriptor.getPassword() == null)
-                  : getPassword().equals(descriptor.getPassword())
-          ) &&
-          (
               (getWhen() == null)
                   ? (descriptor.getWhen() == null)
                   : getWhen().equals(descriptor.getWhen())

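With password support removed, an identity now round-trips through only name,
reference, principal, keytab and when. A rough sketch of the map shape such a
descriptor serializes to; the key strings mirror the KEY_* constants above,
while the values are invented for the example:

    import java.util.Map;
    import java.util.TreeMap;

    public class IdentityMapSketch {
      /** Builds a map with the shape KerberosIdentityDescriptor.toMap() produces. */
      public static Map<String, Object> exampleIdentityMap() {
        Map<String, Object> identity = new TreeMap<>();
        identity.put("name", "example_identity");           // KEY_NAME
        identity.put("reference", "/SERVICE/COMPONENT/id"); // KEY_REFERENCE, invented path
        Map<String, Object> principal = new TreeMap<>();
        principal.put("value", "user/_HOST@EXAMPLE.COM");   // invented principal
        identity.put("principal", principal);               // KEY_PRINCIPAL
        return identity;                                    // no "password" entry anymore
      }
    }
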
http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/KerberosKeytabDescriptor.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/KerberosKeytabDescriptor.java b/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/KerberosKeytabDescriptor.java
index a17caad..7047c81 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/KerberosKeytabDescriptor.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/KerberosKeytabDescriptor.java
@@ -97,6 +97,14 @@ import java.util.TreeMap;
  */
 public class KerberosKeytabDescriptor extends AbstractKerberosDescriptor {
 
+  static final String KEY_FILE = "file";
+  static final String KEY_OWNER = "owner";
+  static final String KEY_GROUP = "group";
+  static final String KEY_CONFIGURATION = "configuration";
+  static final String KEY_CACHABLE = "cachable";
+  static final String KEY_ACL_NAME = "name";
+  static final String KEY_ACL_ACCESS = "access";
+
   /**
    * A String declaring the local username that should be set as the owner of the keytab file
    */
@@ -188,29 +196,29 @@ public class KerberosKeytabDescriptor extends AbstractKerberosDescriptor {
   public KerberosKeytabDescriptor(Map<?, ?> data) {
     // The name for this KerberosKeytabDescriptor is stored in the "file" entry in the map
     // This is not automatically set by the super classes.
-    setName(getStringValue(data, "file"));
+    setName(getStringValue(data, KEY_FILE));
 
     if (data != null) {
       Object object;
 
-      object = data.get("owner");
+      object = data.get(KEY_OWNER);
       if (object instanceof Map) {
         Map<?, ?> map = (Map<?, ?>) object;
-        setOwnerName(getStringValue(map, "name"));
-        setOwnerAccess(getStringValue(map, "access"));
+        setOwnerName(getStringValue(map, KEY_ACL_NAME));
+        setOwnerAccess(getStringValue(map, KEY_ACL_ACCESS));
       }
 
-      object = data.get("group");
+      object = data.get(KEY_GROUP);
       if (object instanceof Map) {
         Map<?, ?> map = (Map<?, ?>) object;
-        setGroupName(getStringValue(map, "name"));
-        setGroupAccess(getStringValue(map, "access"));
+        setGroupName(getStringValue(map, KEY_ACL_NAME));
+        setGroupAccess(getStringValue(map, KEY_ACL_ACCESS));
       }
 
-      setConfiguration(getStringValue(data, "configuration"));
+      setConfiguration(getStringValue(data, KEY_CONFIGURATION));
 
       // If the "cachable" value is anything but false, set it to true
-      setCachable(!"false".equalsIgnoreCase(getStringValue(data, "cachable")));
+      setCachable(!"false".equalsIgnoreCase(getStringValue(data, KEY_CACHABLE)));
     }
   }
 
@@ -422,23 +430,23 @@ public class KerberosKeytabDescriptor extends AbstractKerberosDescriptor {
     String data;
 
     data = getFile();
-    map.put("file", data);
+    map.put(KEY_FILE, data);
 
     // Build file owner map
     Map<String, String> owner = new TreeMap<>();
 
     data = getOwnerName();
     if (data != null) {
-      owner.put("name", data);
+      owner.put(KEY_ACL_NAME, data);
     }
 
     data = getOwnerAccess();
     if (data != null) {
-      owner.put("access", data);
+      owner.put(KEY_ACL_ACCESS, data);
     }
 
     if (!owner.isEmpty()) {
-      map.put("owner", owner);
+      map.put(KEY_OWNER, owner);
     }
     // Build file owner map (end)
 
@@ -447,22 +455,22 @@ public class KerberosKeytabDescriptor extends AbstractKerberosDescriptor {
 
     data = getGroupName();
     if (data != null) {
-      group.put("name", data);
+      group.put(KEY_ACL_NAME, data);
     }
 
     data = getGroupAccess();
     if (data != null) {
-      group.put("access", data);
+      group.put(KEY_ACL_ACCESS, data);
     }
 
     if (!group.isEmpty()) {
-      map.put("group", group);
+      map.put(KEY_GROUP, group);
     }
     // Build file group map (end)
 
     data = getConfiguration();
     if (data != null) {
-      map.put("configuration", data);
+      map.put(KEY_CONFIGURATION, data);
     }
 
     return map;

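The owner and group blocks above build the same two-key ACL sub-map. A small
sketch of that shared shape (extracting a helper like this is an illustration,
not something this patch does):

    import java.util.Map;
    import java.util.TreeMap;

    public class KeytabAclMapSketch {
      /** Builds the two-key ACL sub-map, adding entries only when data is present. */
      public static Map<String, String> aclMap(String name, String access) {
        Map<String, String> acl = new TreeMap<>();
        if (name != null) {
          acl.put("name", name);     // KEY_ACL_NAME
        }
        if (access != null) {
          acl.put("access", access); // KEY_ACL_ACCESS
        }
        return acl;
      }
    }
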
http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/KerberosPrincipalDescriptor.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/KerberosPrincipalDescriptor.java b/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/KerberosPrincipalDescriptor.java
index 74254e3..a8e094f94 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/KerberosPrincipalDescriptor.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/KerberosPrincipalDescriptor.java
@@ -68,6 +68,11 @@ import java.util.TreeMap;
  */
 public class KerberosPrincipalDescriptor extends AbstractKerberosDescriptor {
 
+  static final String KEY_VALUE = "value";
+  static final String KEY_TYPE = "type";
+  static final String KEY_CONFIGURATION = "configuration";
+  static final String KEY_LOCAL_USERNAME = "local_username";
+
   /**
    * A string declaring the type of principal this KerberosPrincipalDescriptor represents.
    * <p/>
@@ -100,8 +105,8 @@ public class KerberosPrincipalDescriptor extends AbstractKerberosDescriptor {
   /**
    * Creates a new KerberosPrincipalDescriptor
    *
-   * @param principal the principal name
-   * @param type the principal type (user, service, etc...)
+   * @param principal     the principal name
+   * @param type          the principal type (user, service, etc...)
    * @param configuration the configuration used to store the principal name
    * @param localUsername the local username to map to the principal
    */
@@ -124,10 +129,10 @@ public class KerberosPrincipalDescriptor extends AbstractKerberosDescriptor {
    * @see org.apache.ambari.server.state.kerberos.KerberosPrincipalDescriptor
    */
   public KerberosPrincipalDescriptor(Map<?, ?> data) {
-    this(getStringValue(data, "value"),
-        getKerberosPrincipalTypeValue(data, "type"),
-        getStringValue(data, "configuration"),
-        getStringValue(data, "local_username")
+    this(getStringValue(data, KEY_VALUE),
+        getKerberosPrincipalTypeValue(data, KEY_TYPE),
+        getStringValue(data, KEY_CONFIGURATION),
+        getStringValue(data, KEY_LOCAL_USERNAME)
     );
   }
 
@@ -269,10 +274,10 @@ public class KerberosPrincipalDescriptor extends AbstractKerberosDescriptor {
   public Map<String, Object> toMap() {
     Map<String, Object> map = new TreeMap<>();
 
-    map.put("value", getValue());
-    map.put("type", KerberosPrincipalType.translate(getType()));
-    map.put("configuration", getConfiguration());
-    map.put("local_username", getLocalUsername());
+    map.put(KEY_VALUE, getValue());
+    map.put(KEY_TYPE, KerberosPrincipalType.translate(getType()));
+    map.put(KEY_CONFIGURATION, getConfiguration());
+    map.put(KEY_LOCAL_USERNAME, getLocalUsername());
 
     return map;
   }

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/KerberosServiceDescriptor.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/KerberosServiceDescriptor.java b/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/KerberosServiceDescriptor.java
index 5da3399..51b7cd0 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/KerberosServiceDescriptor.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/KerberosServiceDescriptor.java
@@ -46,6 +46,7 @@ import org.apache.commons.lang.builder.HashCodeBuilder;
  *      "title": "KerberosServiceDescriptor",
  *      "description": "Describes an Ambari service",
  *      "type": "object",
+ *      "preconfigure": "boolean",
  *      "properties": {
  *        "name": {
  *          "description": "An identifying name for this service descriptor.",
@@ -85,6 +86,9 @@ import org.apache.commons.lang.builder.HashCodeBuilder;
  */
 public class KerberosServiceDescriptor extends AbstractKerberosDescriptorContainer {
 
+  static final String KEY_PRECONFIGURE = "preconfigure";
+  static final String KEY_COMPONENTS = Type.COMPONENT.getDescriptorPluralName();
+
   /**
    * A Map of the components contained within this KerberosServiceDescriptor
    */
@@ -137,7 +141,7 @@ public class KerberosServiceDescriptor extends AbstractKerberosDescriptorContain
     setName(name);
 
     if (data != null) {
-      Object list = data.get(Type.COMPONENT.getDescriptorPluralName());
+      Object list = data.get(KEY_COMPONENTS);
       if (list instanceof Collection) {
         // Assume list is Collection<Map<String, Object>>
         for (Object item : (Collection) list) {
@@ -147,7 +151,7 @@ public class KerberosServiceDescriptor extends AbstractKerberosDescriptorContain
         }
       }
 
-      setPreconfigure(getBooleanValue(data, "preconfigure"));
+      setPreconfigure(getBooleanValue(data, KEY_PRECONFIGURE));
     }
   }
 
@@ -275,11 +279,11 @@ public class KerberosServiceDescriptor extends AbstractKerberosDescriptorContain
       for (KerberosComponentDescriptor component : components.values()) {
         list.add(component.toMap());
       }
-      map.put(Type.COMPONENT.getDescriptorPluralName(), list);
+      map.put(KEY_COMPONENTS, list);
     }
 
     if (preconfigure != null) {
-      map.put("preProcess", preconfigure.toString());
+      map.put(KEY_PRECONFIGURE, preconfigure.toString());
     }
 
     return map;

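The last hunk here is a genuine round-trip fix: the constructor reads
"preconfigure" but toMap() previously wrote "preProcess", so a
serialize-then-parse cycle silently dropped the flag. A minimal demonstration
of the invariant the fix restores:

    import java.util.Map;
    import java.util.TreeMap;

    public class PreconfigureRoundTripSketch {
      static final String KEY_PRECONFIGURE = "preconfigure";

      public static void main(String[] args) {
        // Serialize with the same key the parser reads back.
        Map<String, Object> serialized = new TreeMap<>();
        serialized.put(KEY_PRECONFIGURE, Boolean.TRUE.toString());

        boolean preconfigure = Boolean.parseBoolean((String) serialized.get(KEY_PRECONFIGURE));
        System.out.println(preconfigure); // prints: true
      }
    }
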
http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/main/java/org/apache/ambari/server/state/services/AmbariServerAlertService.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/services/AmbariServerAlertService.java b/ambari-server/src/main/java/org/apache/ambari/server/state/services/AmbariServerAlertService.java
index b8058c8..d3237a9 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/services/AmbariServerAlertService.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/services/AmbariServerAlertService.java
@@ -30,7 +30,6 @@ import java.util.concurrent.TimeUnit;
 import org.apache.ambari.server.AmbariService;
 import org.apache.ambari.server.alerts.AlertRunnable;
 import org.apache.ambari.server.controller.RootServiceResponseFactory.Components;
-import org.apache.ambari.server.controller.RootServiceResponseFactory.Services;
 import org.apache.ambari.server.orm.dao.AlertDefinitionDAO;
 import org.apache.ambari.server.orm.entities.AlertDefinitionEntity;
 import org.apache.ambari.server.state.Cluster;
@@ -113,28 +112,18 @@ public class AmbariServerAlertService extends AbstractScheduledService {
   /**
    * {@inheritDoc}
    * <p/>
-   * Loads all of the {@link Components#AMBARI_SERVER} definitions and schedules
+   * Loads all of the definitions with SERVER source type and schedules
    * the ones that are enabled.
    */
   @Override
   protected void startUp() throws Exception {
     Map<String, Cluster> clusterMap = m_clustersProvider.get().getClusters();
     for (Cluster cluster : clusterMap.values()) {
-      List<AlertDefinitionEntity> entities = m_dao.findByServiceComponent(
-          cluster.getClusterId(), Services.AMBARI.name(),
-          Components.AMBARI_SERVER.name());
-
-      for (AlertDefinitionEntity entity : entities) {
+      for (AlertDefinitionEntity entity : m_dao.findBySourceType(cluster.getClusterId(), SourceType.SERVER)) {
         // don't schedule disabled alert definitions
         if (!entity.getEnabled()) {
           continue;
         }
-
-        SourceType sourceType = entity.getSourceType();
-        if (sourceType != SourceType.SERVER) {
-          continue;
-        }
-
         // schedule the Runnable for the definition
         scheduleRunnable(entity);
       }
@@ -152,10 +141,8 @@ public class AmbariServerAlertService extends AbstractScheduledService {
   protected void runOneIteration() throws Exception {
     Map<String, Cluster> clusterMap = m_clustersProvider.get().getClusters();
     for (Cluster cluster : clusterMap.values()) {
-      // get all of the cluster alerts for AMBARI/AMBARI_SERVER
-      List<AlertDefinitionEntity> entities = m_dao.findByServiceComponent(
-          cluster.getClusterId(), Services.AMBARI.name(),
-          Components.AMBARI_SERVER.name());
+      // get all of the cluster alerts with SERVER source type
+      List<AlertDefinitionEntity> entities = m_dao.findBySourceType(cluster.getClusterId(), SourceType.SERVER);
 
       // for each alert, check to see if it's scheduled correctly
       for (AlertDefinitionEntity entity : entities) {

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/main/java/org/apache/ambari/server/state/stack/RepoUrlInfoCallable.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/stack/RepoUrlInfoCallable.java b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/RepoUrlInfoCallable.java
new file mode 100644
index 0000000..ec1071e
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/RepoUrlInfoCallable.java
@@ -0,0 +1,217 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.server.state.stack;
+
+import java.io.File;
+import java.io.FileReader;
+import java.io.InputStreamReader;
+import java.lang.reflect.Type;
+import java.net.URI;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Set;
+import java.util.concurrent.Callable;
+import java.util.concurrent.TimeUnit;
+
+import org.apache.ambari.server.controller.internal.URLStreamProvider;
+import org.apache.ambari.server.stack.StackModule;
+import org.apache.ambari.server.state.StackId;
+import org.apache.ambari.server.state.stack.RepoUrlInfoCallable.RepoUrlInfoResult;
+import org.apache.commons.lang.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.common.base.Function;
+import com.google.common.collect.Collections2;
+import com.google.gson.Gson;
+import com.google.gson.reflect.TypeToken;
+
+/**
+ * Encapsulates the work to resolve the latest repo information for a stack.
+ * This class must be used AFTER the stack has created its own repositories.
+ */
+public class RepoUrlInfoCallable implements Callable<Map<StackModule, RepoUrlInfoResult>> {
+  private static final int LOOKUP_CONNECTION_TIMEOUT = 2000;
+  private static final int LOOKUP_READ_TIMEOUT = 3000;
+
+  private final static Logger LOG = LoggerFactory.getLogger(RepoUrlInfoCallable.class);
+
+  private URI m_uri = null;
+  private Set<StackModule> m_stacks = new HashSet<>();
+
+  public RepoUrlInfoCallable(URI uri) {
+    m_uri = uri;
+  }
+
+  public void addStack(StackModule stackModule) {
+    m_stacks.add(stackModule);
+  }
+
+  @Override
+  public Map<StackModule, RepoUrlInfoResult> call() throws Exception {
+
+    Type type = new TypeToken<Map<String, Map<String, Object>>>(){}.getType();
+    Gson gson = new Gson();
+
+    Map<String, Map<String, Object>> latestUrlMap = null;
+
+    Set<String> ids = new HashSet<>();
+    ids.addAll(Collections2.transform(m_stacks, new Function<StackModule, String>() {
+      @Override
+      public String apply(StackModule input) {
+        // Render the stack as its "Name-Version" id for logging.
+        return new StackId(input.getModuleInfo()).toString();
+      }
+    }));
+
+    String stackIds = StringUtils.join(ids, ',');
+
+    Long time = System.nanoTime();
+
+    try {
+      if (m_uri.getScheme().startsWith("http")) {
+        URLStreamProvider streamProvider = new URLStreamProvider(
+            LOOKUP_CONNECTION_TIMEOUT, LOOKUP_READ_TIMEOUT,
+            null, null, null);
+
+        LOG.info("Loading latest URL info from {} for stacks {}", m_uri, stackIds);
+
+        latestUrlMap = gson.fromJson(new InputStreamReader(
+            streamProvider.readFrom(m_uri.toString())), type);
+      } else {
+        File jsonFile = new File(m_uri);
+
+        if (jsonFile.exists()) {
+          LOG.info("Loading latest URL info from file {} for stacks {}", m_uri, stackIds);
+          latestUrlMap = gson.fromJson(new FileReader(jsonFile), type);
+        }
+      }
+    } catch (Exception e) {
+      LOG.info("Could not load the URI from {}, stack defaults will be used", m_uri);
+      throw e;
+    } finally {
+      LOG.info("Loaded URI {} for stacks {} in {}ms", m_uri, stackIds,
+          TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - time));
+    }
+
+    Map<StackModule, RepoUrlInfoResult> result = new HashMap<>();
+
+    if (null == latestUrlMap) {
+      LOG.error("Could not load latest data for URI {} and stacks {}", m_uri, stackIds);
+      return result;
+    }
+
+    for (StackModule stackModule : m_stacks) {
+      StackId stackId = new StackId(stackModule.getModuleInfo());
+
+      Map<String, Object> map = latestUrlMap.get(stackId.toString());
+
+      if (null == map) {
+        continue;
+      }
+
+      RepoUrlInfoResult res = new RepoUrlInfoResult();
+
+      if (map.containsKey("manifests")) {
+        // versionMap is laid out like [version -> [os -> VDF uri]]
+        @SuppressWarnings("unchecked")
+        Map<String, Map<String, String>> versionMap = (Map<String, Map<String, String>>) map.get("manifests");
+
+        for (Entry<String, Map<String, String>> versionEntry : versionMap.entrySet()) {
+          String version = versionEntry.getKey();
+          Map<String, URI> resolvedOsMap = resolveOsMap(stackModule, versionEntry.getValue());
+
+          res.addVersion(version, resolvedOsMap);
+        }
+      }
+
+      if (map.containsKey("latest-vdf")) {
+        @SuppressWarnings("unchecked")
+        Map<String, String> osMap = (Map<String, String>) map.get("latest-vdf");
+
+        Map<String, URI> resolvedOsMap = resolveOsMap(stackModule, osMap);
+
+        res.setLatest(resolvedOsMap);
+      }
+
+      result.put(stackModule, res);
+    }
+
+    return result;
+  }
+
+  private Map<String, URI> resolveOsMap(StackModule stackModule, Map<String, String> osMap) {
+
+    Map<String, URI> resolved = new HashMap<>();
+
+    for (Entry<String, String> osEntry : osMap.entrySet()) {
+
+      String uriString = osEntry.getValue();
+
+      URI uri = StackModule.getURI(stackModule, uriString);
+
+      if (null == uri) {
+        LOG.warn("Could not resolve URI {}", uriString);
+      } else {
+        resolved.put(osEntry.getKey(), uri);
+      }
+    }
+
+    return resolved;
+  }
+
+  /**
+   * Stores the results saved per StackModule
+   */
+  public static class RepoUrlInfoResult {
+
+    private Map<String, Map<String, URI>> versions = new HashMap<>();
+    private Map<String, URI> latestVdf = new HashMap<>();
+
+    private void addVersion(String version, Map<String, URI> vdfMap) {
+      versions.put(version, vdfMap);
+    }
+
+    private void setLatest(Map<String, URI> latestMap) {
+      latestVdf = latestMap;
+    }
+
+    /**
+     * Each version entry here should be loaded in its entirety in a new thread
+     */
+    public Map<String, Map<String, URI>> getManifest() {
+      return versions;
+    }
+
+    /**
+     * @return the latest vdf map
+     */
+    public Map<String, URI> getLatestVdf() {
+      return latestVdf;
+    }
+  }
+
+}

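RepoUrlInfoCallable is meant to be handed to an executor so several URIs can
be resolved concurrently. A hedged usage sketch built only on
java.util.concurrent and the API added above; how the StackModule instances
are obtained is left out:

    import java.net.URI;
    import java.util.Map;
    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;
    import java.util.concurrent.Future;

    import org.apache.ambari.server.stack.StackModule;
    import org.apache.ambari.server.state.stack.RepoUrlInfoCallable;
    import org.apache.ambari.server.state.stack.RepoUrlInfoCallable.RepoUrlInfoResult;

    public class RepoUrlLookupSketch {
      /** Resolves latest-repo info for the given stacks on a worker thread. */
      public static Map<StackModule, RepoUrlInfoResult> lookup(
          URI latestUri, Iterable<StackModule> stacks) throws Exception {
        RepoUrlInfoCallable callable = new RepoUrlInfoCallable(latestUri);
        for (StackModule stack : stacks) {
          callable.addStack(stack);
        }
        ExecutorService pool = Executors.newSingleThreadExecutor();
        try {
          Future<Map<StackModule, RepoUrlInfoResult>> future = pool.submit(callable);
          return future.get();
        } finally {
          pool.shutdown();
        }
      }
    }
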
http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/main/java/org/apache/ambari/server/state/stack/RepoVdfCallable.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/stack/RepoVdfCallable.java b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/RepoVdfCallable.java
new file mode 100644
index 0000000..81657a7
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/RepoVdfCallable.java
@@ -0,0 +1,161 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.server.state.stack;
+
+import java.net.URI;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Set;
+import java.util.concurrent.Callable;
+
+import org.apache.ambari.server.stack.StackModule;
+import org.apache.ambari.server.state.RepositoryInfo;
+import org.apache.ambari.server.state.StackId;
+import org.apache.ambari.server.state.StackInfo;
+import org.apache.ambari.server.state.repository.VersionDefinitionXml;
+import org.apache.commons.collections.MapUtils;
+import org.apache.commons.lang.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Encapsulates the work to load and merge version definition (VDF) data for a stack.
+ * This class must be used AFTER the stack has created its own repositories.
+ */
+public class RepoVdfCallable implements Callable<Void> {
+
+  private final static Logger LOG = LoggerFactory.getLogger(RepoVdfCallable.class);
+
+  // !!! these are required for this callable to work
+  private final StackInfo m_stack;
+  private final OsFamily m_family;
+  private final Map<String, URI> m_vdfMap;
+
+  // !!! determines if this is for manifests or latest-vdf
+  private String m_version;
+
+  public RepoVdfCallable(StackModule stackModule,
+      String version, Map<String, URI> vdfOsMap, OsFamily os_family) {
+    m_stack = stackModule.getModuleInfo();
+    m_family = os_family;
+    m_version = version;
+    m_vdfMap = vdfOsMap;
+  }
+
+  public RepoVdfCallable(StackModule stackModule,
+      Map<String, URI> vdfOsMap, OsFamily os_family) {
+    m_stack = stackModule.getModuleInfo();
+    m_family = os_family;
+    m_version = null;
+    m_vdfMap = vdfOsMap;
+  }
+
+  @Override
+  public Void call() throws Exception {
+    if (MapUtils.isEmpty(m_vdfMap)) {
+      return null;
+    }
+
+    boolean forLatest = (null == m_version);
+
+    StackId stackId = new StackId(m_stack);
+
+    VersionDefinitionXml xml = mergeDefinitions(stackId, m_version, m_vdfMap);
+
+    if (null == xml) {
+      return null;
+    }
+
+    if (forLatest) {
+      xml.setStackDefault(true);
+      m_stack.setLatestVersionDefinition(xml);
+    } else {
+      m_stack.addVersionDefinition(m_version, xml);
+    }
+
+    return null;
+  }
+
+  /**
+   * Merges definitions loaded from the common file
+   * @param stackId the stack id
+   * @param version the version string
+   * @param osMap   the map containing all the VDF for an OS
+   * @return the merged version definition
+   * @throws Exception
+   */
+  private VersionDefinitionXml mergeDefinitions(StackId stackId, String version,
+      Map<String, URI> osMap) throws Exception {
+
+    Set<String> oses = new HashSet<>();
+    for (RepositoryInfo ri : m_stack.getRepositories()) {
+      if (null != m_family.find(ri.getOsType())) {
+        oses.add(m_family.find(ri.getOsType()));
+      }
+    }
+
+    VersionDefinitionXml.Merger merger = new VersionDefinitionXml.Merger();
+
+    for (Entry<String, URI> versionEntry : osMap.entrySet()) {
+
+      String osFamily = m_family.find(versionEntry.getKey());
+      URI uri = versionEntry.getValue();
+
+      // !!! check for aliases.  Moving this to OsFamily could result in incorrect behavior
+      if (null == osFamily) {
+        String alias = m_family.getAliases().get(versionEntry.getKey());
+        if (null != alias) {
+          osFamily = m_family.find(alias);
+        }
+      }
+
+      // !!! if the family is not known OR not part of the stack, skip
+      if (null == osFamily || !oses.contains(osFamily)) {
+        LOG.info("Stack {} cannot resolve OS {} to the supported ones: {}. Family: {}",
+            stackId, versionEntry.getKey(), StringUtils.join(oses, ','), osFamily);
+        continue;
+      }
+
+      try {
+        VersionDefinitionXml xml = timedVDFLoad(uri);
+
+        version = (null == version) ? xml.release.version : version;
+        merger.add(version, xml);
+
+      } catch (Exception e) {
+        LOG.warn("Could not load version definition for {} identified by {}. {}",
+            stackId, uri.toString(), e.getMessage(), e);
+      }
+    }
+
+    return merger.merge();
+  }
+
+  private VersionDefinitionXml timedVDFLoad(URI uri) throws Exception {
+    long time = System.currentTimeMillis();
+
+    try {
+      return VersionDefinitionXml.load(uri.toURL());
+    } finally {
+      LOG.debug("Loaded VDF {} in {}ms", uri, System.currentTimeMillis() - time);
+    }
+  }
+
+}

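The two callables compose naturally: each version in a RepoUrlInfoResult
manifest can be wrapped in a RepoVdfCallable and submitted to the same pool. A
sketch under the assumption that OsFamily lives in
org.apache.ambari.server.state.stack (it is used unqualified in the new file
above):

    import java.net.URI;
    import java.util.Map;
    import java.util.concurrent.ExecutorService;

    import org.apache.ambari.server.stack.StackModule;
    import org.apache.ambari.server.state.stack.OsFamily; // assumed package, see note above
    import org.apache.ambari.server.state.stack.RepoUrlInfoCallable.RepoUrlInfoResult;
    import org.apache.ambari.server.state.stack.RepoVdfCallable;

    public class RepoVdfScheduleSketch {
      /** Submits one VDF-merge task per manifest version, plus one for "latest". */
      public static void scheduleVersions(ExecutorService pool, StackModule stack,
          RepoUrlInfoResult result, OsFamily osFamily) {
        for (Map.Entry<String, Map<String, URI>> entry : result.getManifest().entrySet()) {
          pool.submit(new RepoVdfCallable(stack, entry.getKey(), entry.getValue(), osFamily));
        }
        // The "latest" VDF uses the constructor without a version argument.
        pool.submit(new RepoVdfCallable(stack, result.getLatestVdf(), osFamily));
      }
    }
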
http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/main/java/org/apache/ambari/server/state/stack/RepositoryXml.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/stack/RepositoryXml.java b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/RepositoryXml.java
index 03b3705..c2209bb 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/stack/RepositoryXml.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/RepositoryXml.java
@@ -146,6 +146,8 @@ public class RepositoryXml implements Validable{
     private String mirrorslist = null;
     private String repoid = null;
     private String reponame = null;
+    private String distribution = null;
+    private String components = null;
     private boolean unique = false;
 
     private Repo() {
@@ -179,6 +181,13 @@ public class RepositoryXml implements Validable{
       return reponame;
     }
 
+    public String getDistribution() {
+      return distribution;
+    }
+
+    public String getComponents() {
+      return components;
+    }
     /**
      * @return true if version of HDP that change with each release
      */
@@ -212,6 +221,8 @@ public class RepositoryXml implements Validable{
           ri.setOsType(os.trim());
           ri.setRepoId(r.getRepoId());
           ri.setRepoName(r.getRepoName());
+          ri.setDistribution(r.getDistribution());
+          ri.setComponents(r.getComponents());
           ri.setUnique(r.isUnique());
 
           repos.add(ri);

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/ConfigureTask.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/ConfigureTask.java b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/ConfigureTask.java
index 68dc63f..f88691d 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/ConfigureTask.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/ConfigureTask.java
@@ -110,6 +110,9 @@ public class ConfigureTask extends ServerSideActionTask {
   @XmlAttribute(name = "id")
   public String id;
 
+  @XmlAttribute(name="supports-patch")
+  public boolean supportsPatch = false;
+
   /**
    * {@inheritDoc}
    */

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/RepositoryVersionHelper.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/RepositoryVersionHelper.java b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/RepositoryVersionHelper.java
index 9524c09..f540d8d 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/RepositoryVersionHelper.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/RepositoryVersionHelper.java
@@ -124,6 +124,12 @@ public class RepositoryVersionHelper {
         repositoryEntity.setBaseUrl(repositoryJson.get(RepositoryResourceProvider.REPOSITORY_BASE_URL_PROPERTY_ID).getAsString());
         repositoryEntity.setName(repositoryJson.get(RepositoryResourceProvider.REPOSITORY_REPO_NAME_PROPERTY_ID).getAsString());
         repositoryEntity.setRepositoryId(repositoryJson.get(RepositoryResourceProvider.REPOSITORY_REPO_ID_PROPERTY_ID).getAsString());
+        if (repositoryJson.get(RepositoryResourceProvider.REPOSITORY_DISTRIBUTION_PROPERTY_ID) != null) {
+          repositoryEntity.setDistribution(repositoryJson.get(RepositoryResourceProvider.REPOSITORY_DISTRIBUTION_PROPERTY_ID).getAsString());
+        }
+        if (repositoryJson.get(RepositoryResourceProvider.REPOSITORY_COMPONENTS_PROPERTY_ID) != null) {
+          repositoryEntity.setComponents(repositoryJson.get(RepositoryResourceProvider.REPOSITORY_COMPONENTS_PROPERTY_ID).getAsString());
+        }
         if (repositoryJson.get(RepositoryResourceProvider.REPOSITORY_MIRRORS_LIST_PROPERTY_ID) != null) {
           repositoryEntity.setMirrorsList(repositoryJson.get(RepositoryResourceProvider.REPOSITORY_MIRRORS_LIST_PROPERTY_ID).getAsString());
         }
@@ -177,6 +183,8 @@ public class RepositoryVersionHelper {
         repositoryJson.addProperty(RepositoryResourceProvider.REPOSITORY_BASE_URL_PROPERTY_ID, repository.getBaseUrl());
         repositoryJson.addProperty(RepositoryResourceProvider.REPOSITORY_REPO_NAME_PROPERTY_ID, repository.getRepoName());
         repositoryJson.addProperty(RepositoryResourceProvider.REPOSITORY_REPO_ID_PROPERTY_ID, repository.getRepoId());
+        repositoryJson.addProperty(RepositoryResourceProvider.REPOSITORY_DISTRIBUTION_PROPERTY_ID, repository.getDistribution());
+        repositoryJson.addProperty(RepositoryResourceProvider.REPOSITORY_COMPONENTS_PROPERTY_ID, repository.getComponents());
         repositoryJson.addProperty(RepositoryResourceProvider.REPOSITORY_MIRRORS_LIST_PROPERTY_ID, repository.getMirrorsList());
         repositoryJson.addProperty(RepositoryResourceProvider.REPOSITORY_UNIQUE_PROPERTY_ID, repository.isUnique());
         repositoriesJson.add(repositoryJson);
@@ -196,6 +204,8 @@ public class RepositoryVersionHelper {
         RepositoryInfo repositoryInfo = new RepositoryInfo();
         repositoryInfo.setRepoId(repositoryEntity.getRepositoryId());
         repositoryInfo.setRepoName(repositoryEntity.getName());
+        repositoryInfo.setDistribution(repositoryEntity.getDistribution());
+        repositoryInfo.setComponents(repositoryEntity.getComponents());
         repositoryInfo.setBaseUrl(repositoryEntity.getBaseUrl());
         repositoryInfo.setOsType(os.getOsType());
         repositoryInfo.setAmbariManagedRepositories(os.isAmbariManagedRepos());
@@ -310,6 +320,7 @@ public class RepositoryVersionHelper {
     commandRepo.setRepositories(osEntity.getOsType(), osEntity.getRepositories());
     commandRepo.setRepositoryVersion(repoVersion.getVersion());
     commandRepo.setRepositoryVersionId(repoVersion.getId());
+    commandRepo.setResolved(repoVersion.isResolved());
     commandRepo.setStackName(repoVersion.getStackId().getStackName());
 
     if (!osEntity.isAmbariManagedRepos()) {

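The new distribution/components handling follows the same optional-member
pattern as mirrors_list: JsonObject.get() returns null for absent members, so
each optional field is null-checked before getAsString(). A self-contained
Gson sketch of that pattern, with invented property keys:

    import com.google.gson.JsonObject;
    import com.google.gson.JsonParser;

    public class OptionalJsonFieldSketch {
      /** Returns the member as a string, or null when the member is absent. */
      public static String optionalString(JsonObject json, String member) {
        return json.get(member) != null ? json.get(member).getAsString() : null;
      }

      public static void main(String[] args) {
        JsonObject repo = new JsonParser()
            .parse("{\"repo_id\": \"HDP-2.6\"}").getAsJsonObject();
        System.out.println(optionalString(repo, "repo_id"));      // HDP-2.6
        System.out.println(optionalString(repo, "distribution")); // null
      }
    }
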
http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/main/java/org/apache/ambari/server/topology/AmbariContext.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/topology/AmbariContext.java b/ambari-server/src/main/java/org/apache/ambari/server/topology/AmbariContext.java
index 67f23ac..3e894a8 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/topology/AmbariContext.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/topology/AmbariContext.java
@@ -206,7 +206,7 @@ public class AmbariContext {
     StackId stackId = new StackId(stack.getName(), stack.getVersion());
 
     RepositoryVersionEntity repoVersion = null;
-    if (null == repoVersionString && null == repoVersionId) {
+    if (StringUtils.isEmpty(repoVersionString) && null == repoVersionId) {
       List<RepositoryVersionEntity> stackRepoVersions = repositoryVersionDAO.findByStack(stackId);
 
       if (stackRepoVersions.isEmpty()) {
@@ -769,8 +769,8 @@ public class AmbariContext {
         }
       });
 
-      ConfigGroupRequest request = new ConfigGroupRequest(
-          null, clusterName, absoluteGroupName, service, "Host Group Configuration",
+      ConfigGroupRequest request = new ConfigGroupRequest(null, clusterName,
+        absoluteGroupName, service, service, "Host Group Configuration",
         Sets.newHashSet(filteredGroupHosts), serviceConfigs);
 
       // get the config group provider and create config group resource

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/main/java/org/apache/ambari/server/topology/TopologyManager.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/topology/TopologyManager.java b/ambari-server/src/main/java/org/apache/ambari/server/topology/TopologyManager.java
index 0863e37..9769fae 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/topology/TopologyManager.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/topology/TopologyManager.java
@@ -29,9 +29,10 @@ import java.util.List;
 import java.util.Map;
 import java.util.Set;
 import java.util.concurrent.Callable;
-import java.util.concurrent.Executor;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
+import java.util.concurrent.LinkedBlockingQueue;
+import java.util.concurrent.TimeUnit;
 
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.actionmanager.HostRoleCommand;
@@ -73,13 +74,13 @@ import org.apache.ambari.server.state.quicklinksprofile.QuickLinksProfile;
 import org.apache.ambari.server.topology.tasks.ConfigureClusterTask;
 import org.apache.ambari.server.topology.tasks.ConfigureClusterTaskFactory;
 import org.apache.ambari.server.topology.validators.TopologyValidatorService;
+import org.apache.ambari.server.utils.ManagedThreadPoolExecutor;
 import org.apache.ambari.server.utils.RetryHelper;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import com.google.common.eventbus.Subscribe;
 import com.google.inject.Inject;
-import com.google.inject.Injector;
 import com.google.inject.Singleton;
 import com.google.inject.persist.Transactional;
 
@@ -103,9 +104,23 @@ public class TopologyManager {
   private static final String CLUSTER_CONFIG_TASK_MAX_TIME_IN_MILLIS_PROPERTY_NAME = "cluster_configure_task_timeout";
 
   private PersistedState persistedState;
+
+  /**
+   * Single threaded executor to execute async tasks. At the moment it's only used to execute ConfigureClusterTask.
+   */
   private final ExecutorService executor = Executors.newSingleThreadExecutor();
-  private final Executor taskExecutor; // executes TopologyTasks
-  private final boolean parallelTaskCreationEnabled;
+
+  /**
+   * Thread pool size for topology task executors.
+   */
+  private int topologyTaskExecutorThreadPoolSize;
+  /**
+   * There is one ExecutorService for each cluster to execute TopologyTasks.
+   * TopologyTasks are submitted into ExecutorService for each cluster,
+   * however the ExecutorService is started only after cluster configuration is finished.
+   */
+  private final Map<Long, ManagedThreadPoolExecutor> topologyTaskExecutorServiceMap = new HashMap<>();
+
   private Collection<String> hostsToIgnore = new HashSet<>();
   private final List<HostImpl> availableHosts = new LinkedList<>();
   private final Map<String, LogicalRequest> reservedHosts = new HashMap<>();
@@ -116,9 +131,6 @@ public class TopologyManager {
   private Map<Long, ClusterTopology> clusterTopologyMap = new HashMap<>();
 
   @Inject
-  private Injector injector;
-
-  @Inject
   private StackAdvisorBlueprintProcessor stackAdvisorBlueprintProcessor;
 
   @Inject
@@ -162,17 +174,15 @@ public class TopologyManager {
   private Map<Long, Boolean> clusterProvisionWithBlueprintCreationFinished = new HashMap<>();
 
   public TopologyManager() {
-    parallelTaskCreationEnabled = false;
-    taskExecutor = executor;
+    topologyTaskExecutorThreadPoolSize = 1;
   }
 
   @Inject
   public TopologyManager(Configuration configuration) {
-    int threadPoolSize = configuration.getParallelTopologyTaskCreationThreadPoolSize();
-    parallelTaskCreationEnabled = configuration.isParallelTopologyTaskCreationEnabled() && threadPoolSize > 1;
-    taskExecutor = parallelTaskCreationEnabled
-      ? Executors.newFixedThreadPool(threadPoolSize)
-      : executor;
+    topologyTaskExecutorThreadPoolSize = configuration.getParallelTopologyTaskCreationThreadPoolSize();
+    if (!configuration.isParallelTopologyTaskCreationEnabled()) {
+      topologyTaskExecutorThreadPoolSize = 1;
+    }
   }
 
   // executed by the IoC framework after creating the object (guice)
@@ -277,10 +287,6 @@ public class TopologyManager {
     SecurityType securityType = null;
     Credential credential = null;
 
-    if (null == repoVersion && null == repoVersionID) {
-      throw new AmbariException("Repository should be created and the version passed in the request.");
-    }
-
     SecurityConfiguration securityConfiguration = processSecurityConfiguration(request);
 
     if (securityConfiguration != null && securityConfiguration.getType() == SecurityType.KERBEROS) {
@@ -318,6 +324,10 @@ public class TopologyManager {
     // set provision action requested
     topology.setProvisionAction(request.getProvisionAction());
 
+
+    // create task executor for TopologyTasks
+    getOrCreateTopologyTaskExecutor(clusterId);
+
     // persist request
     LogicalRequest logicalRequest = RetryHelper.executeWithRetry(new Callable<LogicalRequest>() {
         @Override
@@ -333,15 +343,6 @@ public class TopologyManager {
     addClusterConfigRequest(topology, new ClusterConfigurationRequest(ambariContext, topology, true,
       stackAdvisorBlueprintProcessor, securityType == SecurityType.KERBEROS));
 
-    // Notify listeners that cluster configuration finished
-    executor.submit(new Callable<Boolean>() {
-      @Override
-      public Boolean call() throws Exception {
-        ambariEventPublisher.publish(new ClusterConfigFinishedEvent(clusterName));
-        return Boolean.TRUE;
-      }
-    });
-
     // Process the logical request
     processRequest(request, topology, logicalRequest);
 
@@ -353,6 +354,17 @@ public class TopologyManager {
     return getRequestStatus(logicalRequest.getRequestId());
   }
 
+  @Subscribe
+  public void onClusterConfigFinishedEvent(ClusterConfigFinishedEvent event) {
+    ManagedThreadPoolExecutor taskExecutor = topologyTaskExecutorServiceMap.get(event.getClusterId());
+    if (taskExecutor == null) {
+      LOG.error("Can't find executor service taskQueue not found for cluster: {} ", event.getClusterName());
+    } else {
+      LOG.info("Starting topology task ExecutorService for cluster: {}", event.getClusterName());
+      taskExecutor.start();
+    }
+  }
+
 
   /**
    * Saves the quick links profile to the DB as an Ambari setting. Creates a new setting entity or updates the existing
@@ -949,16 +961,8 @@ public class TopologyManager {
     }
 
     LOG.info("TopologyManager.processAcceptedHostOffer: queue tasks for host = {} which responded {}", hostName, response.getAnswer());
-    if (parallelTaskCreationEnabled) {
-      executor.execute(new Runnable() { // do not start until cluster config done
-        @Override
-        public void run() {
-          queueHostTasks(topology, response, hostName);
-        }
-      });
-    } else {
-      queueHostTasks(topology, response, hostName);
-    }
+    queueHostTasks(topology, response, hostName);
+
   }
 
   @Transactional
@@ -967,9 +971,23 @@ public class TopologyManager {
     persistedState.registerInTopologyHostInfo(host);
   }
 
+  private ExecutorService getOrCreateTopologyTaskExecutor(Long clusterId) {
+    ManagedThreadPoolExecutor topologyTaskExecutor = this.topologyTaskExecutorServiceMap.get(clusterId);
+    if (topologyTaskExecutor == null) {
+      LOG.info("Creating TopologyTaskExecutorService for clusterId: {}", clusterId);
+
+      topologyTaskExecutor = new ManagedThreadPoolExecutor(topologyTaskExecutorThreadPoolSize,
+              topologyTaskExecutorThreadPoolSize, 0L, TimeUnit.MILLISECONDS,
+              new LinkedBlockingQueue<Runnable>());
+      topologyTaskExecutorServiceMap.put(clusterId, topologyTaskExecutor);
+    }
+    return topologyTaskExecutor;
+  }
+
   private void queueHostTasks(ClusterTopology topology, HostOfferResponse response, String hostName) {
     LOG.info("TopologyManager.processAcceptedHostOffer: queueing tasks for host = {}", hostName);
-    response.executeTasks(taskExecutor, hostName, topology, ambariContext);
+    ExecutorService executorService = getOrCreateTopologyTaskExecutor(topology.getClusterId());
+    response.executeTasks(executorService, hostName, topology, ambariContext);
   }
 
   private void updateHostWithRackInfo(ClusterTopology topology, HostOfferResponse response, HostImpl host) {
@@ -1116,7 +1134,7 @@ public class TopologyManager {
     }
 
     ConfigureClusterTask configureClusterTask = configureClusterTaskFactory.createConfigureClusterTask(topology,
-      configurationRequest);
+      configurationRequest, ambariEventPublisher);
 
     AsyncCallableService<Boolean> asyncCallableService = new AsyncCallableService<>(configureClusterTask, timeout, delay,
         Executors.newScheduledThreadPool(1));
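
For reference, ManagedThreadPoolExecutor itself is not part of this diff. A minimal
sketch of the behaviour TopologyManager depends on -- a ThreadPoolExecutor that
accepts submissions immediately but runs nothing until start() is called -- might
look like the following (an assumed implementation for illustration; the actual
class under ambari-server/src/main/java/org/apache/ambari/server/utils may differ):

import java.util.Queue;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

public class ManagedThreadPoolExecutorSketch extends ThreadPoolExecutor {

  private final Queue<Runnable> buffered = new ConcurrentLinkedQueue<>();
  private volatile boolean started = false;

  public ManagedThreadPoolExecutorSketch(int corePoolSize, int maximumPoolSize,
      long keepAliveTime, TimeUnit unit, BlockingQueue<Runnable> workQueue) {
    super(corePoolSize, maximumPoolSize, keepAliveTime, unit, workQueue);
  }

  @Override
  public synchronized void execute(Runnable command) {
    if (started) {
      super.execute(command);
    } else {
      buffered.add(command); // hold the task until cluster configuration is done
    }
  }

  public synchronized void start() {
    started = true;
    Runnable task;
    while ((task = buffered.poll()) != null) {
      super.execute(task); // release everything queued before start()
    }
  }
}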

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/main/java/org/apache/ambari/server/topology/tasks/ConfigureClusterTask.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/topology/tasks/ConfigureClusterTask.java b/ambari-server/src/main/java/org/apache/ambari/server/topology/tasks/ConfigureClusterTask.java
index 0ce5982..60eaa59 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/topology/tasks/ConfigureClusterTask.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/topology/tasks/ConfigureClusterTask.java
@@ -22,6 +22,8 @@ import java.util.Collections;
 import java.util.Map;
 import java.util.concurrent.Callable;
 
+import org.apache.ambari.server.events.ClusterConfigFinishedEvent;
+import org.apache.ambari.server.events.publishers.AmbariEventPublisher;
 import org.apache.ambari.server.security.authorization.internal.RunWithInternalSecurityContext;
 import org.apache.ambari.server.topology.ClusterConfigurationRequest;
 import org.apache.ambari.server.topology.ClusterTopology;
@@ -39,11 +41,14 @@ public class ConfigureClusterTask implements Callable<Boolean> {
 
   private ClusterConfigurationRequest configRequest;
   private ClusterTopology topology;
+  private AmbariEventPublisher ambariEventPublisher;
 
   @AssistedInject
-  public ConfigureClusterTask(@Assisted ClusterTopology topology, @Assisted ClusterConfigurationRequest configRequest) {
+  public ConfigureClusterTask(@Assisted ClusterTopology topology, @Assisted ClusterConfigurationRequest configRequest,
+                              @Assisted AmbariEventPublisher ambariEventPublisher) {
     this.configRequest = configRequest;
     this.topology = topology;
+    this.ambariEventPublisher = ambariEventPublisher;
   }
 
   @Override
@@ -72,6 +77,12 @@ public class ConfigureClusterTask implements Callable<Boolean> {
       throw new Exception(e);
     }
 
+    LOG.info("Cluster configuration finished successfully!");
+    // Notify listeners that cluster configuration finished
+    long clusterId = topology.getClusterId();
+    ambariEventPublisher.publish(new ClusterConfigFinishedEvent(clusterId,
+            topology.getAmbariContext().getClusterName(clusterId)));
+
     LOG.info("TopologyManager.ConfigureClusterTask: Exiting");
     return true;
   }
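
The publish side above pairs with the @Subscribe handler added to TopologyManager:
the task publishes ClusterConfigFinishedEvent once configuration succeeds, and the
manager starts the buffered per-cluster executor. A minimal Guava EventBus sketch
of that flow (assuming AmbariEventPublisher delegates to an EventBus; the stand-in
class names below are hypothetical):

import com.google.common.eventbus.EventBus;
import com.google.common.eventbus.Subscribe;

public class EventFlowSketch {

  // Stand-in for ClusterConfigFinishedEvent: carries the cluster identity.
  static final class ConfigFinished {
    final long clusterId;
    ConfigFinished(long clusterId) { this.clusterId = clusterId; }
  }

  // Stand-in for TopologyManager: reacts by starting the per-cluster executor.
  static final class TopologyListener {
    @Subscribe
    public void onConfigFinished(ConfigFinished event) {
      System.out.println("starting topology task executor for cluster " + event.clusterId);
    }
  }

  public static void main(String[] args) {
    EventBus bus = new EventBus();
    bus.register(new TopologyListener()); // the manager registers with the bus
    bus.post(new ConfigFinished(1L));     // the task publishes on success
  }
}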

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/main/java/org/apache/ambari/server/topology/tasks/ConfigureClusterTaskFactory.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/topology/tasks/ConfigureClusterTaskFactory.java b/ambari-server/src/main/java/org/apache/ambari/server/topology/tasks/ConfigureClusterTaskFactory.java
index 9e3c151..558af30 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/topology/tasks/ConfigureClusterTaskFactory.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/topology/tasks/ConfigureClusterTaskFactory.java
@@ -18,6 +18,7 @@
 
 package org.apache.ambari.server.topology.tasks;
 
+import org.apache.ambari.server.events.publishers.AmbariEventPublisher;
 import org.apache.ambari.server.topology.ClusterConfigurationRequest;
 import org.apache.ambari.server.topology.ClusterTopology;
 
@@ -25,6 +26,6 @@ import org.apache.ambari.server.topology.ClusterTopology;
 public interface ConfigureClusterTaskFactory {
 
    ConfigureClusterTask createConfigureClusterTask(ClusterTopology topology, ClusterConfigurationRequest
-    configRequest);
+    configRequest, AmbariEventPublisher ambariEventPublisher);
 
 }
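
ConfigureClusterTaskFactory is an assisted-inject factory, so the new
AmbariEventPublisher parameter here must line up with the @Assisted constructor
argument added to ConfigureClusterTask above. A sketch of how such a factory is
typically bound in Guice (an assumed module for illustration; the commit does not
show the actual binding):

import org.apache.ambari.server.topology.tasks.ConfigureClusterTaskFactory;

import com.google.inject.AbstractModule;
import com.google.inject.assistedinject.FactoryModuleBuilder;

public class ConfigureClusterTaskModuleSketch extends AbstractModule {
  @Override
  protected void configure() {
    // Guice generates the factory implementation: every @Assisted constructor
    // parameter of ConfigureClusterTask becomes an argument of
    // createConfigureClusterTask(...), including the new AmbariEventPublisher.
    install(new FactoryModuleBuilder().build(ConfigureClusterTaskFactory.class));
  }
}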


[3/7] ambari git commit: AMBARI-22190. After merging trunk to branch-3.0-perf some parts of code are missing. (mpapirkovskyy)

Posted by mp...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ClusterStackVersionResourceProviderTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ClusterStackVersionResourceProviderTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ClusterStackVersionResourceProviderTest.java
index 654067b..a1415703 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ClusterStackVersionResourceProviderTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ClusterStackVersionResourceProviderTest.java
@@ -31,7 +31,6 @@ import static org.easymock.EasyMock.verify;
 
 import java.io.File;
 import java.io.FileInputStream;
-import java.lang.reflect.Field;
 import java.sql.SQLException;
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -68,6 +67,7 @@ import org.apache.ambari.server.controller.spi.Predicate;
 import org.apache.ambari.server.controller.spi.Request;
 import org.apache.ambari.server.controller.spi.RequestStatus;
 import org.apache.ambari.server.controller.spi.Resource;
+import org.apache.ambari.server.controller.spi.Resource.Type;
 import org.apache.ambari.server.controller.spi.ResourceProvider;
 import org.apache.ambari.server.controller.utilities.PredicateBuilder;
 import org.apache.ambari.server.controller.utilities.PropertyHelper;
@@ -117,6 +117,7 @@ import org.springframework.security.core.Authentication;
 import org.springframework.security.core.context.SecurityContextHolder;
 
 import com.google.common.collect.ImmutableMap;
+import com.google.common.collect.Lists;
 import com.google.common.collect.Sets;
 import com.google.gson.JsonArray;
 import com.google.gson.JsonObject;
@@ -124,9 +125,10 @@ import com.google.gson.JsonParser;
 import com.google.inject.AbstractModule;
 import com.google.inject.Guice;
 import com.google.inject.Injector;
-import com.google.inject.Provider;
 import com.google.inject.util.Modules;
 
+import junit.framework.AssertionFailedError;
+
 
  /**
  * ClusterStackVersionResourceProvider tests.
@@ -144,6 +146,10 @@ public class ClusterStackVersionResourceProviderTest {
   private HostVersionDAO hostVersionDAO;
   private HostComponentStateDAO hostComponentStateDAO;
 
+  private Clusters clusters;
+  private ActionManager actionManager;
+  private AmbariManagementController managementController;
+
   public static final String OS_JSON = "[\n" +
           "   {\n" +
           "      \"repositories\":[\n" +
@@ -177,6 +183,10 @@ public class ClusterStackVersionResourceProviderTest {
     configuration = new Configuration(properties);
     stageFactory = createNiceMock(StageFactory.class);
 
+    clusters = createNiceMock(Clusters.class);
+    actionManager = createNiceMock(ActionManager.class);
+    managementController = createMock(AmbariManagementController.class);
+
     // Initialize injector
     injector = Guice.createInjector(Modules.override(inMemoryModule).with(new MockModule()));
     injector.getInstance(GuiceJpaInitializer.class);
@@ -206,10 +216,6 @@ public class ClusterStackVersionResourceProviderTest {
   }
 
   private void testCreateResources(Authentication authentication) throws Exception {
-    Resource.Type type = Resource.Type.ClusterStackVersion;
-
-    AmbariManagementController managementController = createMock(AmbariManagementController.class);
-    Clusters clusters = createNiceMock(Clusters.class);
     Cluster cluster = createNiceMock(Cluster.class);
     Map<String, String> hostLevelParams = new HashMap<>();
     StackId stackId = new StackId("HDP", "2.0.1");
@@ -256,28 +262,15 @@ public class ClusterStackVersionResourceProviderTest {
     expect(schAMS.getServiceName()).andReturn("AMBARI_METRICS").anyTimes();
     expect(schAMS.getServiceComponentName()).andReturn("METRICS_COLLECTOR").anyTimes();
     // First host contains versionable components
-    final List<ServiceComponentHost> schsH1 = new ArrayList<ServiceComponentHost>(){{
-      add(schDatanode);
-      add(schNamenode);
-      add(schAMS);
-    }};
+    final List<ServiceComponentHost> schsH1 = Lists.newArrayList(schDatanode, schNamenode, schAMS);
     // Second host does not contain versionable components
-    final List<ServiceComponentHost> schsH2 = new ArrayList<ServiceComponentHost>(){{
-      add(schAMS);
-    }};
-
+    final List<ServiceComponentHost> schsH2 = Lists.newArrayList(schAMS);
 
     ServiceOsSpecific.Package hdfsPackage = new ServiceOsSpecific.Package();
     hdfsPackage.setName("hdfs");
     List<ServiceOsSpecific.Package> packages = Collections.singletonList(hdfsPackage);
 
-    ActionManager actionManager = createNiceMock(ActionManager.class);
-
     RequestStatusResponse response = createNiceMock(RequestStatusResponse.class);
-    ResourceProviderFactory resourceProviderFactory = createNiceMock(ResourceProviderFactory.class);
-    ResourceProvider csvResourceProvider = createNiceMock(ClusterStackVersionResourceProvider.class);
-
-    AbstractControllerResourceProvider.init(resourceProviderFactory);
 
     Map<String, Map<String, String>> hostConfigTags = new HashMap<>();
     expect(configHelper.getEffectiveDesiredTags(anyObject(ClusterImpl.class), anyObject(String.class))).andReturn(hostConfigTags);
@@ -288,12 +281,10 @@ public class ClusterStackVersionResourceProviderTest {
     expect(managementController.getActionManager()).andReturn(actionManager).anyTimes();
     expect(managementController.getJdkResourceUrl()).andReturn("/JdkResourceUrl").anyTimes();
     expect(managementController.getPackagesForServiceHost(anyObject(ServiceInfo.class),
-            EasyMock.anyObject(), anyObject(String.class))).
-            andReturn(packages).times((hostCount - 1) * 2); // 1 host has no versionable components, other hosts have 2 services
-//            // that's why we don't send commands to it
-
-    expect(resourceProviderFactory.getHostResourceProvider(EasyMock.anyObject(), EasyMock.anyObject(),
-            eq(managementController))).andReturn(csvResourceProvider).anyTimes();
+            EasyMock.<Map<String, String>>anyObject(), anyObject(String.class))).
+            andReturn(packages).anyTimes();
+    expect(managementController.findConfigurationTagsWithOverrides(anyObject(Cluster.class), EasyMock.anyString()))
+      .andReturn(new HashMap<String, Map<String, String>>()).anyTimes();
 
     expect(clusters.getCluster(anyObject(String.class))).andReturn(cluster);
     expect(clusters.getHostsForCluster(anyObject(String.class))).andReturn(
@@ -355,18 +346,14 @@ public class ClusterStackVersionResourceProviderTest {
     StageUtils.setTopologyManager(injector.getInstance(TopologyManager.class));
     StageUtils.setConfiguration(injector.getInstance(Configuration.class));
 
+    ResourceProvider provider = createProvider(managementController);
+    injector.injectMembers(provider);
+
     // replay
-    replay(managementController, response, clusters, resourceProviderFactory, csvResourceProvider,
+    replay(managementController, response, clusters,
             cluster, repositoryVersionDAOMock, configHelper, schDatanode, schNamenode, schAMS, actionManager,
             executionCommand, executionCommandWrapper,stage, stageFactory);
 
-    ResourceProvider provider = AbstractControllerResourceProvider.getResourceProvider(
-        type,
-        PropertyHelper.getPropertyIds(type),
-        PropertyHelper.getKeyPropertyIds(type),
-        managementController);
-
-    injector.injectMembers(provider);
 
     // add the property map to a set for the request.  add more maps for multiple creates
     Set<Map<String, Object>> propertySet = new LinkedHashSet<>();
@@ -616,10 +603,6 @@ public class ClusterStackVersionResourceProviderTest {
    }
 
    private void testCreateResourcesWithRepoDefinition(Authentication authentication) throws Exception {
-    Resource.Type type = Resource.Type.ClusterStackVersion;
-
-    AmbariManagementController managementController = createMock(AmbariManagementController.class);
-    Clusters clusters = createNiceMock(Clusters.class);
     Cluster cluster = createNiceMock(Cluster.class);
     StackId stackId = new StackId("HDP", "2.0.1");
 
@@ -704,7 +687,7 @@ public class ClusterStackVersionResourceProviderTest {
 
     RequestStatusResponse response = createNiceMock(RequestStatusResponse.class);
     ResourceProviderFactory resourceProviderFactory = createNiceMock(ResourceProviderFactory.class);
-    ResourceProvider csvResourceProvider = createNiceMock(ClusterStackVersionResourceProvider.class);
+    ResourceProvider csvResourceProvider = createNiceMock(ResourceProvider.class);
 
     AbstractControllerResourceProvider.init(resourceProviderFactory);
 
@@ -723,6 +706,10 @@ public class ClusterStackVersionResourceProviderTest {
     expect(resourceProviderFactory.getHostResourceProvider(EasyMock.anyObject(), EasyMock.anyObject(),
             eq(managementController))).andReturn(csvResourceProvider).anyTimes();
 
+    expect(managementController.findConfigurationTagsWithOverrides(anyObject(Cluster.class), EasyMock.anyString()))
+      .andReturn(new HashMap<String, Map<String, String>>()).anyTimes();
+
+
     expect(clusters.getCluster(anyObject(String.class))).andReturn(cluster);
     expect(clusters.getHostsForCluster(anyObject(String.class))).andReturn(
         hostsForCluster).anyTimes();
@@ -794,12 +781,7 @@ public class ClusterStackVersionResourceProviderTest {
             cluster, repositoryVersionDAOMock, configHelper, schDatanode, schNamenode, schHBM, actionManager,
             executionCommandWrapper,stage, stageFactory);
 
-    ResourceProvider provider = AbstractControllerResourceProvider.getResourceProvider(
-        type,
-        PropertyHelper.getPropertyIds(type),
-        PropertyHelper.getKeyPropertyIds(type),
-        managementController);
-
+    ResourceProvider provider = createProvider(managementController);
     injector.injectMembers(provider);
 
     // add the property map to a set for the request.  add more maps for multiple creates
@@ -857,10 +839,6 @@ public class ClusterStackVersionResourceProviderTest {
 
     String os_json = json.toString();
 
-    Resource.Type type = Resource.Type.ClusterStackVersion;
-
-    AmbariManagementController managementController = createMock(AmbariManagementController.class);
-    Clusters clusters = createNiceMock(Clusters.class);
     Cluster cluster = createNiceMock(Cluster.class);
     StackId stackId = new StackId("HDP", "2.0.1");
 
@@ -944,10 +922,7 @@ public class ClusterStackVersionResourceProviderTest {
     ActionManager actionManager = createNiceMock(ActionManager.class);
 
     RequestStatusResponse response = createNiceMock(RequestStatusResponse.class);
-    ResourceProviderFactory resourceProviderFactory = createNiceMock(ResourceProviderFactory.class);
-    ResourceProvider csvResourceProvider = createNiceMock(ClusterStackVersionResourceProvider.class);
-
-    AbstractControllerResourceProvider.init(resourceProviderFactory);
+    ResourceProvider csvResourceProvider = createNiceMock(ResourceProvider.class);
 
     Map<String, Map<String, String>> hostConfigTags = new HashMap<>();
     expect(configHelper.getEffectiveDesiredTags(anyObject(ClusterImpl.class), anyObject(String.class))).andReturn(hostConfigTags);
@@ -961,8 +936,8 @@ public class ClusterStackVersionResourceProviderTest {
             EasyMock.anyObject(), anyObject(String.class))).
             andReturn(packages).anyTimes(); // only one host has the versionable component
 
-    expect(resourceProviderFactory.getHostResourceProvider(EasyMock.anyObject(), EasyMock.anyObject(),
-            eq(managementController))).andReturn(csvResourceProvider).anyTimes();
+    expect(managementController.findConfigurationTagsWithOverrides(anyObject(Cluster.class), EasyMock.anyString()))
+    .andReturn(new HashMap<String, Map<String, String>>()).anyTimes();
 
     expect(clusters.getCluster(anyObject(String.class))).andReturn(cluster);
     expect(clusters.getHostsForCluster(anyObject(String.class))).andReturn(
@@ -990,11 +965,9 @@ public class ClusterStackVersionResourceProviderTest {
     expect(cluster.transitionHostsToInstalling(anyObject(RepositoryVersionEntity.class),
         anyObject(VersionDefinitionXml.class), eq(false))).andReturn(hostsNeedingInstallCommands).atLeastOnce();
 
-//    ExecutionCommand executionCommand = createNiceMock(ExecutionCommand.class);
     ExecutionCommand executionCommand = new ExecutionCommand();
     ExecutionCommandWrapper executionCommandWrapper = createNiceMock(ExecutionCommandWrapper.class);
 
-//    expect(executionCommand.getHostLevelParams()).andReturn(new HashMap<String, String>()).atLeastOnce();
     expect(executionCommandWrapper.getExecutionCommand()).andReturn(executionCommand).anyTimes();
 
     Stage stage = createNiceMock(Stage.class);
@@ -1029,16 +1002,11 @@ public class ClusterStackVersionResourceProviderTest {
     StageUtils.setConfiguration(injector.getInstance(Configuration.class));
 
     // replay
-    replay(managementController, response, clusters, hdfsService, hbaseService, resourceProviderFactory, csvResourceProvider,
+    replay(managementController, response, clusters, hdfsService, hbaseService, csvResourceProvider,
             cluster, repositoryVersionDAOMock, configHelper, schDatanode, schNamenode, schHBM, actionManager,
             executionCommandWrapper,stage, stageFactory);
 
-    ResourceProvider provider = AbstractControllerResourceProvider.getResourceProvider(
-        type,
-        PropertyHelper.getPropertyIds(type),
-        PropertyHelper.getKeyPropertyIds(type),
-        managementController);
-
+    ResourceProvider provider = createProvider(managementController);
     injector.injectMembers(provider);
 
     // add the property map to a set for the request.  add more maps for multiple creates
@@ -1094,10 +1062,6 @@ public class ClusterStackVersionResourceProviderTest {
    }
 
    private void testCreateResourcesMixed(Authentication authentication) throws Exception {
-    Resource.Type type = Resource.Type.ClusterStackVersion;
-
-    AmbariManagementController managementController = createMock(AmbariManagementController.class);
-    Clusters clusters = createNiceMock(Clusters.class);
     Cluster cluster = createNiceMock(Cluster.class);
     Map<String, String> hostLevelParams = new HashMap<>();
     StackId stackId = new StackId("HDP", "2.0.1");
@@ -1162,16 +1126,9 @@ public class ClusterStackVersionResourceProviderTest {
     expect(schAMS.getServiceName()).andReturn("AMBARI_METRICS").anyTimes();
     expect(schAMS.getServiceComponentName()).andReturn("METRICS_COLLECTOR").anyTimes();
     // First host contains versionable components
-    final List<ServiceComponentHost> schsH1 = new ArrayList<ServiceComponentHost>(){{
-      add(schDatanode);
-      add(schNamenode);
-      add(schAMS);
-    }};
+    final List<ServiceComponentHost> schsH1 = Lists.newArrayList(schDatanode, schNamenode, schAMS);
     // Second host does not contain versionable components
-    final List<ServiceComponentHost> schsH2 = new ArrayList<ServiceComponentHost>(){{
-      add(schAMS);
-    }};
-
+    final List<ServiceComponentHost> schsH2 = Lists.newArrayList(schAMS);
 
     ServiceOsSpecific.Package hdfsPackage = new ServiceOsSpecific.Package();
     hdfsPackage.setName("hdfs");
@@ -1180,10 +1137,6 @@ public class ClusterStackVersionResourceProviderTest {
     ActionManager actionManager = createNiceMock(ActionManager.class);
 
     RequestStatusResponse response = createNiceMock(RequestStatusResponse.class);
-    ResourceProviderFactory resourceProviderFactory = createNiceMock(ResourceProviderFactory.class);
-    ResourceProvider csvResourceProvider = createNiceMock(ClusterStackVersionResourceProvider.class);
-
-    AbstractControllerResourceProvider.init(resourceProviderFactory);
 
     Map<String, Map<String, String>> hostConfigTags = new HashMap<>();
     expect(configHelper.getEffectiveDesiredTags(anyObject(ClusterImpl.class), anyObject(String.class))).andReturn(hostConfigTags);
@@ -1194,12 +1147,11 @@ public class ClusterStackVersionResourceProviderTest {
     expect(managementController.getActionManager()).andReturn(actionManager).anyTimes();
     expect(managementController.getJdkResourceUrl()).andReturn("/JdkResourceUrl").anyTimes();
     expect(managementController.getPackagesForServiceHost(anyObject(ServiceInfo.class),
-            EasyMock.anyObject(), anyObject(String.class))).
-            andReturn(packages).times((hostCount - 1) * 2); // 1 host has no versionable components, other hosts have 2 services
-//            // that's why we don't send commands to it
+            EasyMock.<Map<String, String>>anyObject(), anyObject(String.class))).
+            andReturn(packages).anyTimes();
 
-    expect(resourceProviderFactory.getHostResourceProvider(EasyMock.anyObject(), EasyMock.anyObject(),
-            eq(managementController))).andReturn(csvResourceProvider).anyTimes();
+    expect(managementController.findConfigurationTagsWithOverrides(anyObject(Cluster.class), EasyMock.anyString()))
+      .andReturn(new HashMap<String, Map<String, String>>()).anyTimes();
 
     expect(clusters.getCluster(anyObject(String.class))).andReturn(cluster);
     expect(clusters.getHostsForCluster(anyObject(String.class))).andReturn(
@@ -1265,16 +1217,11 @@ public class ClusterStackVersionResourceProviderTest {
 
 
     // replay
-    replay(managementController, response, clusters, resourceProviderFactory, csvResourceProvider,
+    replay(managementController, response, clusters,
             cluster, repositoryVersionDAOMock, configHelper, schDatanode, schNamenode, schAMS, actionManager,
             executionCommand, executionCommandWrapper,stage, stageFactory);
 
-    ResourceProvider provider = AbstractControllerResourceProvider.getResourceProvider(
-        type,
-        PropertyHelper.getPropertyIds(type),
-        PropertyHelper.getKeyPropertyIds(type),
-        managementController);
-
+    ResourceProvider provider = createProvider(managementController);
     injector.injectMembers(provider);
 
     // add the property map to a set for the request.  add more maps for multiple creates
@@ -1327,10 +1274,6 @@ public class ClusterStackVersionResourceProviderTest {
    */
   @Test
   public void testCreateResourcesInInstalledState() throws Exception {
-    Resource.Type type = Resource.Type.ClusterStackVersion;
-
-    AmbariManagementController managementController = createMock(AmbariManagementController.class);
-    Clusters clusters = createNiceMock(Clusters.class);
     Cluster cluster = createNiceMock(Cluster.class);
     StackId stackId = new StackId("HDP", "2.2.0");
     String repoVersion = "2.2.0.1-885";
@@ -1406,8 +1349,7 @@ public class ClusterStackVersionResourceProviderTest {
 
     RequestStatusResponse response = createNiceMock(RequestStatusResponse.class);
     ResourceProviderFactory resourceProviderFactory = createNiceMock(ResourceProviderFactory.class);
-    ResourceProvider csvResourceProvider = createNiceMock(
-        ClusterStackVersionResourceProvider.class);
+    ResourceProvider csvResourceProvider = createNiceMock(ResourceProvider.class);
 
     AbstractControllerResourceProvider.init(resourceProviderFactory);
 
@@ -1452,10 +1394,7 @@ public class ClusterStackVersionResourceProviderTest {
         csvResourceProvider, cluster, repositoryVersionDAOMock, configHelper, schDatanode,
         stageFactory, hostVersionDAO);
 
-    ResourceProvider provider = AbstractControllerResourceProvider.getResourceProvider(type,
-        PropertyHelper.getPropertyIds(type), PropertyHelper.getKeyPropertyIds(type),
-        managementController);
-
+    ResourceProvider provider = createProvider(managementController);
     injector.injectMembers(provider);
 
     // add the property map to a set for the request. add more maps for multiple
@@ -1499,10 +1438,6 @@ public class ClusterStackVersionResourceProviderTest {
 
   @Test
   public void testCreateResourcesPPC() throws Exception {
-    Resource.Type type = Resource.Type.ClusterStackVersion;
-
-    AmbariManagementController managementController = createMock(AmbariManagementController.class);
-    Clusters clusters = createNiceMock(Clusters.class);
     Cluster cluster = createNiceMock(Cluster.class);
     Map<String, String> hostLevelParams = new HashMap<>();
     StackId stackId = new StackId("HDP", "2.0.1");
@@ -1565,16 +1500,9 @@ public class ClusterStackVersionResourceProviderTest {
     expect(schAMS.getServiceName()).andReturn("AMBARI_METRICS").anyTimes();
     expect(schAMS.getServiceComponentName()).andReturn("METRICS_COLLECTOR").anyTimes();
     // First host contains versionable components
-    final List<ServiceComponentHost> schsH1 = new ArrayList<ServiceComponentHost>(){{
-      add(schDatanode);
-      add(schNamenode);
-      add(schAMS);
-    }};
+    final List<ServiceComponentHost> schsH1 = Lists.newArrayList(schDatanode, schNamenode, schAMS);
     // Second host does not contain versionable components
-    final List<ServiceComponentHost> schsH2 = new ArrayList<ServiceComponentHost>(){{
-      add(schAMS);
-    }};
-
+    final List<ServiceComponentHost> schsH2 = Lists.newArrayList(schAMS);
 
     ServiceOsSpecific.Package hdfsPackage = new ServiceOsSpecific.Package();
     hdfsPackage.setName("hdfs");
@@ -1598,8 +1526,11 @@ public class ClusterStackVersionResourceProviderTest {
     expect(managementController.getJdkResourceUrl()).andReturn("/JdkResourceUrl").anyTimes();
     expect(managementController.getPackagesForServiceHost(anyObject(ServiceInfo.class),
             (Map<String, String>) anyObject(List.class), anyObject(String.class))).
-            andReturn(packages).anyTimes(); // 1 host has no versionable components, other hosts have 2 services
-  //            // that's why we don't send commands to it
+            andReturn(packages).anyTimes();
+
+    expect(managementController.findConfigurationTagsWithOverrides(anyObject(Cluster.class), EasyMock.anyString()))
+      .andReturn(new HashMap<String, Map<String, String>>()).anyTimes();
+
 
     expect(resourceProviderFactory.getHostResourceProvider(anyObject(Set.class), anyObject(Map.class),
             eq(managementController))).andReturn(csvResourceProvider).anyTimes();
@@ -1670,12 +1601,7 @@ public class ClusterStackVersionResourceProviderTest {
             cluster, repoVersion, repositoryVersionDAOMock, configHelper, schDatanode, schNamenode, schAMS, actionManager,
             executionCommand, executionCommandWrapper,stage, stageFactory);
 
-    ResourceProvider provider = AbstractControllerResourceProvider.getResourceProvider(
-        type,
-        PropertyHelper.getPropertyIds(type),
-        PropertyHelper.getKeyPropertyIds(type),
-        managementController);
-
+    ResourceProvider provider = createProvider(managementController);
     injector.injectMembers(provider);
 
     // add the property map to a set for the request.  add more maps for multiple creates
@@ -1709,10 +1635,6 @@ public class ClusterStackVersionResourceProviderTest {
 
   @Test
   public void testGetSorted() throws Exception {
-
-    Resource.Type type = Resource.Type.ClusterStackVersion;
-
-    final Clusters clusters = createNiceMock(Clusters.class);
     Cluster cluster = createNiceMock(Cluster.class);
     StackId stackId = new StackId("HDP", "2.2.0");
 
@@ -1764,19 +1686,17 @@ public class ClusterStackVersionResourceProviderTest {
         csvResourceProvider, cluster, repositoryVersionDAOMock, configHelper,
         stageFactory, hostVersionDAO);
 
-    ResourceProvider provider = AbstractControllerResourceProvider.getResourceProvider(type,
-        PropertyHelper.getPropertyIds(type), PropertyHelper.getKeyPropertyIds(type),
-        /*managementController*/null);
+    ResourceProvider provider = createProvider(managementController);
     injector.injectMembers(provider);
 
-    Field field = ClusterStackVersionResourceProvider.class.getDeclaredField("clusters");
-    field.setAccessible(true);
-    field.set(null, new Provider<Clusters>() {
-      @Override
-      public Clusters get() {
-        return clusters;
-      }
-    });
     // set the security auth
     SecurityContextHolder.getContext().setAuthentication(
@@ -1852,6 +1772,9 @@ public class ClusterStackVersionResourceProviderTest {
 
     expect(desiredVersionDefinition.getAvailableServices((StackInfo)EasyMock.anyObject())).andReturn(availableServices).once();
 
+    expect(cluster.transitionHostsToInstalling(
+        anyObject(RepositoryVersionEntity.class), anyObject(VersionDefinitionXml.class),
+        EasyMock.anyBoolean())).andReturn(Collections.<Host>emptyList()).atLeastOnce();
 
     replay(cluster, repoVersionEnt, desiredVersionDefinition, service1, service2, availableService1, availableService2);
 
@@ -1890,7 +1813,9 @@ public class ClusterStackVersionResourceProviderTest {
      availableServices.add(availableService2);
 
      expect(desiredVersionDefinition.getAvailableServices((StackInfo)EasyMock.anyObject())).andReturn(availableServices).once();
-
+     expect(cluster.transitionHostsToInstalling(
+         anyObject(RepositoryVersionEntity.class), anyObject(VersionDefinitionXml.class),
+         EasyMock.anyBoolean())).andThrow(new AssertionFailedError()).anyTimes();
 
      replay(cluster, repoVersionEnt, desiredVersionDefinition, service1, availableService1, availableService2);
 
@@ -1906,10 +1831,6 @@ public class ClusterStackVersionResourceProviderTest {
    }
 
    private void testCreateResourcesExistingUpgrade(Authentication authentication) throws Exception {
-    Resource.Type type = Resource.Type.ClusterStackVersion;
-
-    AmbariManagementController managementController = createMock(AmbariManagementController.class);
-    Clusters clusters = createNiceMock(Clusters.class);
     Cluster cluster = createNiceMock(Cluster.class);
 
     expect(managementController.getClusters()).andReturn(clusters).anyTimes();
@@ -1927,12 +1848,7 @@ public class ClusterStackVersionResourceProviderTest {
     // replay
     replay(managementController, clusters, cluster);
 
-    ResourceProvider provider = AbstractControllerResourceProvider.getResourceProvider(
-        type,
-        PropertyHelper.getPropertyIds(type),
-        PropertyHelper.getKeyPropertyIds(type),
-        managementController);
-
+    ResourceProvider provider = createProvider(managementController);
     injector.injectMembers(provider);
 
     // add the property map to a set for the request.  add more maps for multiple creates
@@ -1964,6 +1880,17 @@ public class ClusterStackVersionResourceProviderTest {
     verify(cluster);
 
   }
+
+   private ClusterStackVersionResourceProvider createProvider(AmbariManagementController amc) {
+     ResourceProviderFactory factory = injector.getInstance(ResourceProviderFactory.class);
+     AbstractControllerResourceProvider.init(factory);
+
+     Resource.Type type = Type.ClusterStackVersion;
+     return (ClusterStackVersionResourceProvider) AbstractControllerResourceProvider.getResourceProvider(type,
+         PropertyHelper.getPropertyIds(type), PropertyHelper.getKeyPropertyIds(type),
+         amc);
+   }
+
   private class MockModule extends AbstractModule {
     @Override
     protected void configure() {
@@ -1973,6 +1900,9 @@ public class ClusterStackVersionResourceProviderTest {
       bind(StageFactory.class).toInstance(stageFactory);
       bind(HostVersionDAO.class).toInstance(hostVersionDAO);
       bind(HostComponentStateDAO.class).toInstance(hostComponentStateDAO);
+      bind(Clusters.class).toInstance(clusters);
+      bind(ActionManager.class).toInstance(actionManager);
+      bind(AmbariManagementController.class).toInstance(managementController);
     }
   }
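
The MockModule now supplies the shared mocks directly, relying on the
Modules.override(...) call in the test setup: bindings in the overriding module
replace the base module's bindings, which is how the mocked Clusters,
ActionManager, and AmbariManagementController reach the provider. A
self-contained sketch of that pattern, with hypothetical names:

import com.google.inject.AbstractModule;
import com.google.inject.Guice;
import com.google.inject.Injector;
import com.google.inject.util.Modules;

public class OverrideSketch {
  interface Service { String name(); }

  static class BaseModule extends AbstractModule {
    @Override protected void configure() {
      bind(Service.class).toInstance(() -> "production");
    }
  }

  public static void main(String[] args) {
    Service mock = () -> "mock";
    Injector injector = Guice.createInjector(
        Modules.override(new BaseModule()).with(new AbstractModule() {
          @Override protected void configure() {
            bind(Service.class).toInstance(mock); // the override wins
          }
        }));
    System.out.println(injector.getInstance(Service.class).name()); // prints "mock"
  }
}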
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ComponentResourceProviderTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ComponentResourceProviderTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ComponentResourceProviderTest.java
index 0ced822..1ea4b9a 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ComponentResourceProviderTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ComponentResourceProviderTest.java
@@ -217,6 +217,7 @@ public class ComponentResourceProviderTest {
     Map <String, Integer> serviceComponentStateCountMap = new HashMap<>();
     serviceComponentStateCountMap.put("startedCount", 1);
     serviceComponentStateCountMap.put("installedCount", 0);
+    serviceComponentStateCountMap.put("installedAndMaintenanceOffCount", 0);
     serviceComponentStateCountMap.put("installFailedCount", 0);
     serviceComponentStateCountMap.put("initCount", 0);
     serviceComponentStateCountMap.put("unknownCount", 1);
@@ -280,6 +281,7 @@ public class ComponentResourceProviderTest {
     propertyIds.add(ComponentResourceProvider.COMPONENT_TOTAL_COUNT_PROPERTY_ID);
     propertyIds.add(ComponentResourceProvider.COMPONENT_STARTED_COUNT_PROPERTY_ID);
     propertyIds.add(ComponentResourceProvider.COMPONENT_INSTALLED_COUNT_PROPERTY_ID);
+    propertyIds.add(ComponentResourceProvider.COMPONENT_INSTALLED_AND_MAINTENANCE_OFF_COUNT_PROPERTY_ID);
     propertyIds.add(ComponentResourceProvider.COMPONENT_INSTALL_FAILED_COUNT_PROPERTY_ID);
     propertyIds.add(ComponentResourceProvider.COMPONENT_INIT_COUNT_PROPERTY_ID);
     propertyIds.add(ComponentResourceProvider.COMPONENT_UNKNOWN_COUNT_PROPERTY_ID);
@@ -311,6 +313,8 @@ public class ComponentResourceProviderTest {
       Assert.assertEquals(0, resource.getPropertyValue(
         ComponentResourceProvider.COMPONENT_INSTALLED_COUNT_PROPERTY_ID));
       Assert.assertEquals(0, resource.getPropertyValue(
+        ComponentResourceProvider.COMPONENT_INSTALLED_AND_MAINTENANCE_OFF_COUNT_PROPERTY_ID));
+      Assert.assertEquals(0, resource.getPropertyValue(
           ComponentResourceProvider.COMPONENT_INSTALL_FAILED_COUNT_PROPERTY_ID));
       Assert.assertEquals(0, resource.getPropertyValue(
           ComponentResourceProvider.COMPONENT_INIT_COUNT_PROPERTY_ID));
@@ -379,6 +383,7 @@ public class ComponentResourceProviderTest {
     Map <String, Integer> serviceComponentStateCountMap = new HashMap<>();
     serviceComponentStateCountMap.put("startedCount", 0);
     serviceComponentStateCountMap.put("installedCount", 1);
+    serviceComponentStateCountMap.put("installedAndMaintenanceOffCount", 0);
     serviceComponentStateCountMap.put("installFailedCount", 0);
     serviceComponentStateCountMap.put("initCount", 0);
     serviceComponentStateCountMap.put("unknownCount", 0);
@@ -691,6 +696,7 @@ public class ComponentResourceProviderTest {
     Map <String, Integer> serviceComponentStateCountMap = new HashMap<>();
     serviceComponentStateCountMap.put("startedCount", 0);
     serviceComponentStateCountMap.put("installedCount", 1);
+    serviceComponentStateCountMap.put("installedAndMaintenanceOffCount", 0);
     serviceComponentStateCountMap.put("installFailedCount", 0);
     serviceComponentStateCountMap.put("initCount", 0);
     serviceComponentStateCountMap.put("unknownCount", 0);

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/RepositoryResourceProviderTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/RepositoryResourceProviderTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/RepositoryResourceProviderTest.java
index 6a0ab89..ca5cde0 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/RepositoryResourceProviderTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/RepositoryResourceProviderTest.java
@@ -48,13 +48,15 @@ public class RepositoryResourceProviderTest {
   private static final String VAL_REPO_ID = "HDP-0.2";
   private static final String VAL_REPO_NAME = "HDP1";
   private static final String VAL_BASE_URL = "http://foo.com";
+  private static final String VAL_DISTRIBUTION = "mydist";
+  private static final String VAL_COMPONENT_NAME = "mycomponentname";
 
   @Test
   public void testGetResources() throws Exception{
     AmbariManagementController managementController = EasyMock.createMock(AmbariManagementController.class);
 
     RepositoryResponse rr = new RepositoryResponse(VAL_BASE_URL, VAL_OS,
-        VAL_REPO_ID, VAL_REPO_NAME, null, null);
+        VAL_REPO_ID, VAL_REPO_NAME, VAL_DISTRIBUTION, VAL_COMPONENT_NAME, null, null);
     rr.setStackName(VAL_STACK_NAME);
     rr.setStackVersion(VAL_STACK_VERSION);
     Set<RepositoryResponse> allResponse = new HashSet<>();
@@ -76,6 +78,8 @@ public class RepositoryResourceProviderTest {
     propertyIds.add(RepositoryResourceProvider.REPOSITORY_OS_TYPE_PROPERTY_ID);
     propertyIds.add(RepositoryResourceProvider.REPOSITORY_REPO_ID_PROPERTY_ID);
     propertyIds.add(RepositoryResourceProvider.REPOSITORY_CLUSTER_STACK_VERSION_PROPERTY_ID);
+    propertyIds.add(RepositoryResourceProvider.REPOSITORY_DISTRIBUTION_PROPERTY_ID);
+    propertyIds.add(RepositoryResourceProvider.REPOSITORY_COMPONENTS_PROPERTY_ID);
 
     Predicate predicate =
         new PredicateBuilder().property(RepositoryResourceProvider.REPOSITORY_STACK_NAME_PROPERTY_ID).equals(VAL_STACK_NAME)
@@ -111,6 +115,12 @@ public class RepositoryResourceProviderTest {
 
       o = resource.getPropertyValue(RepositoryResourceProvider.REPOSITORY_CLUSTER_STACK_VERSION_PROPERTY_ID);
       Assert.assertNull(o);
+
+      o = resource.getPropertyValue(RepositoryResourceProvider.REPOSITORY_DISTRIBUTION_PROPERTY_ID);
+      Assert.assertEquals(VAL_DISTRIBUTION, o);
+
+      o = resource.getPropertyValue(RepositoryResourceProvider.REPOSITORY_COMPONENTS_PROPERTY_ID);
+      Assert.assertEquals(VAL_COMPONENT_NAME, o);
     }
 
     // !!! check that the stack version id is returned
@@ -139,6 +149,12 @@ public class RepositoryResourceProviderTest {
 
       o = resource.getPropertyValue(RepositoryResourceProvider.REPOSITORY_CLUSTER_STACK_VERSION_PROPERTY_ID);
       Assert.assertEquals(525L, o);
+
+      o = resource.getPropertyValue(RepositoryResourceProvider.REPOSITORY_DISTRIBUTION_PROPERTY_ID);
+      Assert.assertEquals(VAL_DISTRIBUTION, o);
+
+      o = resource.getPropertyValue(RepositoryResourceProvider.REPOSITORY_COMPONENTS_PROPERTY_ID);
+      Assert.assertEquals(VAL_COMPONENT_NAME, o);
     }
 
     // verify
@@ -152,7 +168,7 @@ public class RepositoryResourceProviderTest {
     AmbariManagementController managementController = EasyMock.createMock(AmbariManagementController.class);
 
     RepositoryResponse rr = new RepositoryResponse(VAL_BASE_URL, VAL_OS,
-        VAL_REPO_ID, VAL_REPO_NAME, null, null);
+        VAL_REPO_ID, VAL_REPO_NAME, null, null, null, null);
     Set<RepositoryResponse> allResponse = new HashSet<>();
     allResponse.add(rr);
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/StackUpgradeConfigurationMergeTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/StackUpgradeConfigurationMergeTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/StackUpgradeConfigurationMergeTest.java
index bb3fa8f..f13aeed 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/StackUpgradeConfigurationMergeTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/StackUpgradeConfigurationMergeTest.java
@@ -33,6 +33,7 @@ import org.apache.ambari.server.actionmanager.HostRoleCommandFactory;
 import org.apache.ambari.server.actionmanager.HostRoleCommandFactoryImpl;
 import org.apache.ambari.server.actionmanager.RequestFactory;
 import org.apache.ambari.server.actionmanager.StageFactory;
+import org.apache.ambari.server.api.services.AmbariMetaInfo;
 import org.apache.ambari.server.controller.AbstractRootServiceResponseFactory;
 import org.apache.ambari.server.controller.AmbariManagementController;
 import org.apache.ambari.server.controller.KerberosHelper;
@@ -56,8 +57,10 @@ import org.apache.ambari.server.state.Config;
 import org.apache.ambari.server.state.ConfigFactory;
 import org.apache.ambari.server.state.ConfigHelper;
 import org.apache.ambari.server.state.DesiredConfig;
+import org.apache.ambari.server.state.PropertyInfo;
 import org.apache.ambari.server.state.RepositoryType;
 import org.apache.ambari.server.state.Service;
+import org.apache.ambari.server.state.ServiceComponent;
 import org.apache.ambari.server.state.ServiceComponentFactory;
 import org.apache.ambari.server.state.ServiceComponentHostFactory;
 import org.apache.ambari.server.state.ServiceFactory;
@@ -65,6 +68,7 @@ import org.apache.ambari.server.state.StackId;
 import org.apache.ambari.server.state.UpgradeContext;
 import org.apache.ambari.server.state.UpgradeContextFactory;
 import org.apache.ambari.server.state.UpgradeHelper;
+import org.apache.ambari.server.state.ValueAttributesInfo;
 import org.apache.ambari.server.state.configgroup.ConfigGroupFactory;
 import org.apache.ambari.server.state.scheduler.RequestExecutionFactory;
 import org.apache.ambari.server.state.stack.OsFamily;
@@ -95,12 +99,15 @@ import com.google.inject.assistedinject.FactoryModuleBuilder;
 public class StackUpgradeConfigurationMergeTest extends EasyMockSupport {
 
   private Injector m_injector;
+  private AmbariMetaInfo m_metainfo;
 
   /**
    * @throws Exception
    */
   @Before
   public void before() throws Exception {
+    m_metainfo = createNiceMock(AmbariMetaInfo.class);
+
     MockModule mockModule = new MockModule();
 
     // create an injector which will inject the mocks
@@ -286,6 +293,134 @@ public class StackUpgradeConfigurationMergeTest extends EasyMockSupport {
     assertEquals("stack-220-original", expectedBarType.get("bar-property-2"));
   }
 
+  /**
+   * Tests that any read-only properties are not taken from the existing
+   * configs, but from the new stack value.
+   *
+   * @throws Exception
+   */
+  @Test
+  public void testReadOnlyPropertyIsTakenFromTargetStack() throws Exception {
+    RepositoryVersionEntity repoVersion211 = createNiceMock(RepositoryVersionEntity.class);
+    RepositoryVersionEntity repoVersion220 = createNiceMock(RepositoryVersionEntity.class);
+
+    StackId stack211 = new StackId("HDP-2.1.1");
+    StackId stack220 = new StackId("HDP-2.2.0");
+
+    String version211 = "2.1.1.0-1234";
+    String version220 = "2.2.0.0-1234";
+
+    expect(repoVersion211.getStackId()).andReturn(stack211).atLeastOnce();
+    expect(repoVersion211.getVersion()).andReturn(version211).atLeastOnce();
+
+    expect(repoVersion220.getStackId()).andReturn(stack220).atLeastOnce();
+    expect(repoVersion220.getVersion()).andReturn(version220).atLeastOnce();
+
+    String fooSite = "foo-site";
+    String fooPropertyName = "foo-property-1";
+    String serviceName = "ZOOKEEPER";
+
+    Map<String, Map<String, String>> stack211Configs = new HashMap<>();
+    Map<String, String> stack211FooType = new HashMap<>();
+    stack211Configs.put(fooSite, stack211FooType);
+    stack211FooType.put(fooPropertyName, "stack-211-original");
+
+    Map<String, Map<String, String>> stack220Configs = new HashMap<>();
+    Map<String, String> stack220FooType = new HashMap<>();
+    stack220Configs.put(fooSite, stack220FooType);
+    stack220FooType.put(fooPropertyName, "stack-220-original");
+
+    PropertyInfo readOnlyProperty = new PropertyInfo();
+    ValueAttributesInfo valueAttributesInfo = new ValueAttributesInfo();
+    valueAttributesInfo.setReadOnly(true);
+    readOnlyProperty.setName(fooPropertyName);
+    readOnlyProperty.setFilename(fooSite + ".xml");
+    readOnlyProperty.setPropertyValueAttributes(valueAttributesInfo);
+
+    expect(m_metainfo.getServiceProperties(stack211.getStackName(), stack211.getStackVersion(),
+        serviceName)).andReturn(Sets.newHashSet(readOnlyProperty)).atLeastOnce();
+
+    Map<String, String> existingFooType = new HashMap<>();
+
+    ClusterConfigEntity fooConfigEntity = createNiceMock(ClusterConfigEntity.class);
+
+    expect(fooConfigEntity.getType()).andReturn(fooSite);
+
+    Config fooConfig = createNiceMock(Config.class);
+
+    existingFooType.put(fooPropertyName, "my-foo-property-1");
+
+    expect(fooConfig.getType()).andReturn(fooSite).atLeastOnce();
+    expect(fooConfig.getProperties()).andReturn(existingFooType);
+
+    Map<String, DesiredConfig> desiredConfigurations = new HashMap<>();
+    desiredConfigurations.put(fooSite, null);
+
+    Service zookeeper = createNiceMock(Service.class);
+    expect(zookeeper.getName()).andReturn(serviceName).atLeastOnce();
+    expect(zookeeper.getServiceComponents()).andReturn(new HashMap<String, ServiceComponent>()).once();
+    zookeeper.setDesiredRepositoryVersion(repoVersion220);
+    expectLastCall().once();
+
+    Cluster cluster = createNiceMock(Cluster.class);
+    expect(cluster.getCurrentStackVersion()).andReturn(stack211).atLeastOnce();
+    expect(cluster.getDesiredStackVersion()).andReturn(stack220);
+    expect(cluster.getDesiredConfigs()).andReturn(desiredConfigurations);
+    expect(cluster.getDesiredConfigByType(fooSite)).andReturn(fooConfig);
+    expect(cluster.getService(serviceName)).andReturn(zookeeper);
+
+    ConfigHelper configHelper = m_injector.getInstance(ConfigHelper.class);
+
+    expect(configHelper.getDefaultProperties(stack211, serviceName)).andReturn(stack211Configs).anyTimes();
+    expect(configHelper.getDefaultProperties(stack220, serviceName)).andReturn(stack220Configs).anyTimes();
+
+    Capture<Map<String, Map<String, String>>> expectedConfigurationsCapture = EasyMock.newCapture();
+
+    configHelper.createConfigTypes(EasyMock.anyObject(Cluster.class),
+        EasyMock.anyObject(StackId.class), EasyMock.anyObject(AmbariManagementController.class),
+        EasyMock.capture(expectedConfigurationsCapture), EasyMock.anyObject(String.class),
+        EasyMock.anyObject(String.class));
+
+    expectLastCall().once();
+
+    // mock the service config DAO and replay it
+    ServiceConfigEntity zookeeperServiceConfig = createNiceMock(ServiceConfigEntity.class);
+    expect(zookeeperServiceConfig.getClusterConfigEntities()).andReturn(
+        Lists.newArrayList(fooConfigEntity));
+
+    ServiceConfigDAO serviceConfigDAOMock = m_injector.getInstance(ServiceConfigDAO.class);
+    List<ServiceConfigEntity> latestServiceConfigs = Lists.newArrayList(zookeeperServiceConfig);
+    expect(serviceConfigDAOMock.getLastServiceConfigsForService(EasyMock.anyLong(),
+        eq(serviceName))).andReturn(latestServiceConfigs).once();
+
+    UpgradeContext context = createNiceMock(UpgradeContext.class);
+    expect(context.getCluster()).andReturn(cluster).atLeastOnce();
+    expect(context.getType()).andReturn(UpgradeType.ROLLING).atLeastOnce();
+    expect(context.getDirection()).andReturn(Direction.UPGRADE).atLeastOnce();
+    expect(context.getRepositoryVersion()).andReturn(repoVersion220).anyTimes();
+    expect(context.getSupportedServices()).andReturn(Sets.newHashSet(serviceName)).atLeastOnce();
+    expect(context.getSourceRepositoryVersion(EasyMock.anyString())).andReturn(repoVersion211).atLeastOnce();
+    expect(context.getTargetRepositoryVersion(EasyMock.anyString())).andReturn(repoVersion220).atLeastOnce();
+    expect(context.getOrchestrationType()).andReturn(RepositoryType.STANDARD).anyTimes();
+    expect(context.getHostRoleCommandFactory()).andStubReturn(m_injector.getInstance(HostRoleCommandFactory.class));
+    expect(context.getRoleGraphFactory()).andStubReturn(m_injector.getInstance(RoleGraphFactory.class));
+
+    replayAll();
+
+    UpgradeHelper upgradeHelper = m_injector.getInstance(UpgradeHelper.class);
+    upgradeHelper.updateDesiredRepositoriesAndConfigs(context);
+
+    Map<String, Map<String, String>> expectedConfigurations = expectedConfigurationsCapture.getValue();
+    Map<String, String> expectedFooType = expectedConfigurations.get(fooSite);
+
+    // The read-only property must be taken from the target stack defaults,
+    // not from the existing cluster config.
+    assertEquals(1, expectedConfigurations.size());
+    assertEquals(1, expectedFooType.size());
+
+    assertEquals("stack-220-original", expectedFooType.get(fooPropertyName));
+  }
 
   private class MockModule implements Module {
 
@@ -325,6 +460,7 @@ public class StackUpgradeConfigurationMergeTest extends EasyMockSupport {
       binder.bind(ServiceConfigDAO.class).toInstance(createNiceMock(ServiceConfigDAO.class));
       binder.install(new FactoryModuleBuilder().build(UpgradeContextFactory.class));
       binder.bind(HostRoleCommandFactory.class).to(HostRoleCommandFactoryImpl.class);
+      binder.bind(AmbariMetaInfo.class).toInstance(m_metainfo);
 
       binder.requestStaticInjection(UpgradeResourceProvider.class);
     }
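
The new test boils down to a simple merge rule: read-only properties follow the
target stack, while everything else keeps the value already set on the cluster.
A standalone sketch of that rule (an assumed simplification of the real merge
logic in UpgradeHelper):

public class ReadOnlyMergeSketch {

  static String mergedValue(boolean readOnly, String existingValue, String targetStackDefault) {
    // Read-only properties always take the target stack default; everything
    // else keeps the cluster's current value, falling back to the default.
    if (readOnly) {
      return targetStackDefault;
    }
    return existingValue != null ? existingValue : targetStackDefault;
  }

  public static void main(String[] args) {
    // Mirrors the test: the existing "my-foo-property-1" is discarded because
    // the property is flagged read-only, so "stack-220-original" wins.
    System.out.println(mergedValue(true, "my-foo-property-1", "stack-220-original"));
  }
}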

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UpgradeResourceProviderTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UpgradeResourceProviderTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UpgradeResourceProviderTest.java
index 37a7b44..fea56d9 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UpgradeResourceProviderTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UpgradeResourceProviderTest.java
@@ -84,6 +84,7 @@ import org.apache.ambari.server.orm.entities.UpgradeHistoryEntity;
 import org.apache.ambari.server.orm.entities.UpgradeItemEntity;
 import org.apache.ambari.server.security.TestAuthenticationFactory;
 import org.apache.ambari.server.serveraction.upgrades.AutoSkipFailedSummaryAction;
+import org.apache.ambari.server.serveraction.upgrades.ConfigureAction;
 import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.Clusters;
 import org.apache.ambari.server.state.Config;
@@ -1733,6 +1734,113 @@ public class UpgradeResourceProviderTest extends EasyMockSupport {
     upgradeResourceProvider.createResources(request);
   }
 
+  @Test
+  public void testCreatePatchWithConfigChanges() throws Exception {
+    Cluster cluster = clusters.getCluster("c1");
+
+    File f = new File("src/test/resources/version_definition_test_patch_config.xml");
+    repoVersionEntity2112.setType(RepositoryType.PATCH);
+    repoVersionEntity2112.setVersionXml(IOUtils.toString(new FileInputStream(f)));
+    repoVersionEntity2112.setVersionXsd("version_definition.xsd");
+    repoVersionDao.merge(repoVersionEntity2112);
+
+    List<UpgradeEntity> upgrades = upgradeDao.findUpgrades(cluster.getClusterId());
+    assertEquals(0, upgrades.size());
+
+    Map<String, Object> requestProps = new HashMap<>();
+    requestProps.put(UpgradeResourceProvider.UPGRADE_CLUSTER_NAME, "c1");
+    requestProps.put(UpgradeResourceProvider.UPGRADE_REPO_VERSION_ID, String.valueOf(repoVersionEntity2112.getId()));
+    requestProps.put(UpgradeResourceProvider.UPGRADE_PACK, "upgrade_test");
+    requestProps.put(UpgradeResourceProvider.UPGRADE_SKIP_PREREQUISITE_CHECKS, "true");
+    requestProps.put(UpgradeResourceProvider.UPGRADE_DIRECTION, Direction.UPGRADE.name());
+
+    // !!! test that a PATCH upgrade skips config changes
+    ResourceProvider upgradeResourceProvider = createProvider(amc);
+
+    Request request = PropertyHelper.getCreateRequest(Collections.singleton(requestProps), null);
+    RequestStatus status = upgradeResourceProvider.createResources(request);
+    Set<Resource> resources = status.getAssociatedResources();
+    assertEquals(1, resources.size());
+    Long requestId = (Long) resources.iterator().next().getPropertyValue("Upgrade/request_id");
+    assertNotNull(requestId);
+
+    UpgradeEntity upgradeEntity = upgradeDao.findUpgradeByRequestId(requestId);
+    assertEquals(RepositoryType.PATCH, upgradeEntity.getOrchestration());
+
+    HostRoleCommandDAO hrcDAO = injector.getInstance(HostRoleCommandDAO.class);
+    List<HostRoleCommandEntity> commands = hrcDAO.findByRequest(upgradeEntity.getRequestId());
+
+    boolean foundConfigTask = false;
+    for (HostRoleCommandEntity command : commands) {
+      if (StringUtils.isNotBlank(command.getCustomCommandName()) &&
+          command.getCustomCommandName().equals(ConfigureAction.class.getName())) {
+        foundConfigTask = true;
+        break;
+      }
+    }
+    assertFalse(foundConfigTask);
+
+    // !!! test that a patch upgrade using a pack that forces config changes does pick them up
+    cluster.setUpgradeEntity(null);
+    requestProps.put(UpgradeResourceProvider.UPGRADE_PACK, "upgrade_test_force_config_change");
+    request = PropertyHelper.getCreateRequest(Collections.singleton(requestProps), null);
+
+    status = upgradeResourceProvider.createResources(request);
+    resources = status.getAssociatedResources();
+    assertEquals(1, resources.size());
+    requestId = (Long) resources.iterator().next().getPropertyValue("Upgrade/request_id");
+    assertNotNull(requestId);
+
+    upgradeEntity = upgradeDao.findUpgradeByRequestId(requestId);
+    assertEquals(RepositoryType.PATCH, upgradeEntity.getOrchestration());
+
+    commands = hrcDAO.findByRequest(upgradeEntity.getRequestId());
+
+    foundConfigTask = false;
+    for (HostRoleCommandEntity command : commands) {
+      if (StringUtils.isNotBlank(command.getCustomCommandName()) &&
+          command.getCustomCommandName().equals(ConfigureAction.class.getName())) {
+        foundConfigTask = true;
+        break;
+      }
+    }
+    assertTrue(foundConfigTask);
+
+
+
+    // !!! test that a regular upgrade will pick up the config change
+    cluster.setUpgradeEntity(null);
+    repoVersionEntity2112.setType(RepositoryType.STANDARD);
+    repoVersionDao.merge(repoVersionEntity2112);
+
+    requestProps.put(UpgradeResourceProvider.UPGRADE_PACK, "upgrade_test");
+    request = PropertyHelper.getCreateRequest(Collections.singleton(requestProps), null);
+
+    status = upgradeResourceProvider.createResources(request);
+    resources = status.getAssociatedResources();
+    assertEquals(1, resources.size());
+    requestId = (Long) resources.iterator().next().getPropertyValue("Upgrade/request_id");
+    assertNotNull(requestId);
+
+    upgradeEntity = upgradeDao.findUpgradeByRequestId(requestId);
+    assertEquals(RepositoryType.STANDARD, upgradeEntity.getOrchestration());
+
+    commands = hrcDAO.findByRequest(upgradeEntity.getRequestId());
+
+    foundConfigTask = false;
+    for (HostRoleCommandEntity command : commands) {
+      if (StringUtils.isNotBlank(command.getCustomCommandName()) &&
+          command.getCustomCommandName().equals(ConfigureAction.class.getName())) {
+        foundConfigTask = true;
+        break;
+      }
+    }
+    assertTrue(foundConfigTask);
+
+  }
+
+
+
   private String parseSingleMessage(String msgStr){
     JsonParser parser = new JsonParser();
     JsonArray msgArray = (JsonArray) parser.parse(msgStr);
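
The three scan loops in testCreatePatchWithConfigChanges above are byte-for-byte identical. A small helper along these lines (hypothetical, dropped into the same test class, using only identifiers already imported there) would express the check once:

    // Hypothetical helper: true when any command in the request was generated
    // for the ConfigureAction server action.
    private boolean containsConfigureTask(List<HostRoleCommandEntity> commands) {
      for (HostRoleCommandEntity command : commands) {
        if (StringUtils.isNotBlank(command.getCustomCommandName())
            && command.getCustomCommandName().equals(ConfigureAction.class.getName())) {
          return true;
        }
      }
      return false;
    }

The three blocks then reduce to assertFalse(containsConfigureTask(commands)) for the plain PATCH case and assertTrue(containsConfigureTask(commands)) for the other two.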

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/test/java/org/apache/ambari/server/events/listeners/upgrade/StackVersionListenerTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/events/listeners/upgrade/StackVersionListenerTest.java b/ambari-server/src/test/java/org/apache/ambari/server/events/listeners/upgrade/StackVersionListenerTest.java
index b066324..ffacab9 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/events/listeners/upgrade/StackVersionListenerTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/events/listeners/upgrade/StackVersionListenerTest.java
@@ -306,6 +306,11 @@ public class StackVersionListenerTest extends EasyMockSupport {
     RepositoryVersionDAO dao = createNiceMock(RepositoryVersionDAO.class);
     RepositoryVersionEntity entity = createNiceMock(RepositoryVersionEntity.class);
     expect(entity.getVersion()).andReturn("2.4.0.0").once();
+
+    // when the version gets reported back, we set this repo to resolved
+    entity.setResolved(true);
+    expectLastCall().once();
+
     expect(dao.findByPK(1L)).andReturn(entity).once();
     expect(dao.merge(entity)).andReturn(entity).once();
 
@@ -325,6 +330,47 @@ public class StackVersionListenerTest extends EasyMockSupport {
   }
 
   /**
+   * Tests that if a component advertises a version that already matches the
+   * repository, the repository is marked as resolved.
+   *
+   * @throws Exception
+   */
+  @Test
+  public void testRepositoryResolvedWhenVersionsMatch() throws Exception {
+    String version = "2.4.0.0";
+
+    expect(sch.getVersion()).andReturn(version);
+    expect(componentInfo.isVersionAdvertised()).andReturn(true).once();
+
+    RepositoryVersionDAO dao = createNiceMock(RepositoryVersionDAO.class);
+    RepositoryVersionEntity entity = createNiceMock(RepositoryVersionEntity.class);
+    expect(entity.getVersion()).andReturn(version).once();
+    expect(entity.isResolved()).andReturn(false).once();
+
+    // when the version gets reported back, we set this repo to resolved
+    entity.setResolved(true);
+    expectLastCall().once();
+
+    expect(dao.findByPK(1L)).andReturn(entity).once();
+    expect(dao.merge(entity)).andReturn(entity).once();
+
+    replayAll();
+
+    String newVersion = version;
+
+    HostComponentVersionAdvertisedEvent event = new HostComponentVersionAdvertisedEvent(cluster, sch, newVersion, 1L);
+
+    // !!! avoid injector for test class
+    Field field = StackVersionListener.class.getDeclaredField("repositoryVersionDAO");
+    field.setAccessible(true);
+    field.set(listener, dao);
+
+    listener.onAmbariEvent(event);
+
+    verifyAll();
+  }
+
+  /**
    * Tests that the {@link RepositoryVersionEntity} is not updated if there is
    * an upgrade, even if the repo ID is passed back and the versions don't
    * match.
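
Both new expectations follow EasyMock's convention for void methods: the call is made on the mock during the record phase, then expectLastCall() attaches the cardinality. A compact, runnable sketch of the idiom (the Entity interface is illustrative, standing in for RepositoryVersionEntity):

    import static org.easymock.EasyMock.createNiceMock;
    import static org.easymock.EasyMock.expectLastCall;
    import static org.easymock.EasyMock.replay;
    import static org.easymock.EasyMock.verify;

    public class VoidExpectationSketch {
      // Hypothetical entity with a void setter, like RepositoryVersionEntity.setResolved.
      interface Entity {
        void setResolved(boolean resolved);
      }

      public static void main(String[] args) {
        Entity entity = createNiceMock(Entity.class);

        // Record the void call, then register the expectation for it.
        entity.setResolved(true);
        expectLastCall().once();

        replay(entity);
        entity.setResolved(true);  // the code under test performs this call
        verify(entity);            // fails unless setResolved(true) ran exactly once
      }
    }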

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/test/java/org/apache/ambari/server/scheduler/ExecutionScheduleManagerTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/scheduler/ExecutionScheduleManagerTest.java b/ambari-server/src/test/java/org/apache/ambari/server/scheduler/ExecutionScheduleManagerTest.java
index 8f587be..bc1ab47 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/scheduler/ExecutionScheduleManagerTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/scheduler/ExecutionScheduleManagerTest.java
@@ -32,7 +32,6 @@ import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertThat;
 
 import java.lang.reflect.InvocationTargetException;
-import java.lang.reflect.Method;
 import java.util.ArrayList;
 import java.util.Date;
 import java.util.HashMap;
@@ -87,6 +86,8 @@ import com.google.inject.Injector;
 import com.google.inject.Module;
 import com.google.inject.persist.Transactional;
 import com.google.inject.util.Modules;
+import com.sun.jersey.api.client.Client;
+import com.sun.jersey.api.client.WebResource;
 
 import junit.framework.Assert;
 
@@ -640,15 +641,35 @@ public class ExecutionScheduleManagerTest {
   }
 
   @Test
-  public void testCompleteRelativePath() throws NoSuchMethodException, InvocationTargetException, IllegalAccessException {
-    ExecutionScheduleManager scheduleManagerMock = createMock(ExecutionScheduleManager.class);
-    Method completeRelativeUri = ExecutionScheduleManager.class.getDeclaredMethod("completeRelativeUri", String.class);
-    completeRelativeUri.setAccessible(true);
-
-    assertEquals("api/v1/clusters", completeRelativeUri.invoke(scheduleManagerMock, "clusters"));
-    assertEquals("api/v1/clusters", completeRelativeUri.invoke(scheduleManagerMock, "/clusters"));
-    assertEquals("/api/v1/clusters", completeRelativeUri.invoke(scheduleManagerMock, "/api/v1/clusters"));
-    assertEquals("api/v1/clusters", completeRelativeUri.invoke(scheduleManagerMock, "api/v1/clusters"));
-    assertEquals("", completeRelativeUri.invoke(scheduleManagerMock, ""));
+  public void testExtendApiResource() throws NoSuchMethodException, InvocationTargetException, IllegalAccessException {
+    WebResource webResource = Client.create().resource("http://localhost:8080/");
+
+    String clustersEndpoint = "http://localhost:8080/api/v1/clusters";
+
+    Clusters clustersMock = createMock(Clusters.class);
+
+    Configuration configurationMock = createNiceMock(Configuration.class);
+    ExecutionScheduler executionSchedulerMock = createMock(ExecutionScheduler.class);
+    InternalTokenStorage tokenStorageMock = createMock(InternalTokenStorage.class);
+    ActionDBAccessor actionDBAccessorMock = createMock(ActionDBAccessor.class);
+    Gson gson = new Gson();
+
+    replay(clustersMock, configurationMock, executionSchedulerMock, tokenStorageMock,
+      actionDBAccessorMock);
+
+    ExecutionScheduleManager scheduleManager =
+      new ExecutionScheduleManager(configurationMock, executionSchedulerMock,
+        tokenStorageMock, clustersMock, actionDBAccessorMock, gson);
+
+    assertEquals(clustersEndpoint,
+      scheduleManager.extendApiResource(webResource, "clusters").getURI().toString());
+    assertEquals(clustersEndpoint,
+      scheduleManager.extendApiResource(webResource, "/clusters").getURI().toString());
+    assertEquals(clustersEndpoint,
+      scheduleManager.extendApiResource(webResource, "/api/v1/clusters").getURI().toString());
+    assertEquals(clustersEndpoint,
+      scheduleManager.extendApiResource(webResource, "api/v1/clusters").getURI().toString());
+    assertEquals("http://localhost:8080/",
+      scheduleManager.extendApiResource(webResource, "").getURI().toString());
   }
 }
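
The replacement test exercises extendApiResource against a real Jersey WebResource instead of reflecting on the removed private completeRelativeUri helper. Read together, the five assertions imply a simple rule: an empty path is returned untouched, leading slashes are tolerated, and the api/v1 prefix is added only when the caller did not supply it. A sketch of that inferred rule (a reading of the assertions, not the production implementation):

    import com.sun.jersey.api.client.Client;
    import com.sun.jersey.api.client.WebResource;

    public class ExtendApiResourceSketch {

      // Inferred behavior: normalize the leading slash, then ensure the
      // api/v1 prefix exactly once; empty input leaves the resource as-is.
      static WebResource extendApiResource(WebResource resource, String relativeUri) {
        if (relativeUri == null || relativeUri.isEmpty()) {
          return resource;
        }
        String normalized = relativeUri.startsWith("/") ? relativeUri.substring(1) : relativeUri;
        if (!normalized.startsWith("api/v1")) {
          normalized = "api/v1/" + normalized;
        }
        return resource.path(normalized);
      }

      public static void main(String[] args) {
        WebResource base = Client.create().resource("http://localhost:8080/");
        System.out.println(extendApiResource(base, "clusters").getURI());         // .../api/v1/clusters
        System.out.println(extendApiResource(base, "/api/v1/clusters").getURI()); // .../api/v1/clusters
        System.out.println(extendApiResource(base, "").getURI());                 // http://localhost:8080/
      }
    }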

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/AbstractPrepareKerberosServerActionTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/AbstractPrepareKerberosServerActionTest.java b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/AbstractPrepareKerberosServerActionTest.java
index 95e5513..8ff5ad2 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/AbstractPrepareKerberosServerActionTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/AbstractPrepareKerberosServerActionTest.java
@@ -149,7 +149,7 @@ public class AbstractPrepareKerberosServerActionTest {
       identityFilter,
       "",
         configurations, kerberosConfigurations,
-        false, propertiesToIgnore);
+        false, propertiesToIgnore, false);
 
     verify(kerberosHelper);
   }

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/KerberosIdentityDataFileTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/KerberosIdentityDataFileTest.java b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/KerberosIdentityDataFileTest.java
index 323ba8e..03727d7 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/KerberosIdentityDataFileTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/KerberosIdentityDataFileTest.java
@@ -54,7 +54,7 @@ public class KerberosIdentityDataFileTest {
           "principal" + i, "principal_type" + i, "keytabFilePath" + i,
           "keytabFileOwnerName" + i, "keytabFileOwnerAccess" + i,
           "keytabFileGroupName" + i, "keytabFileGroupAccess" + i,
-          "false");
+          "false", "false");
     }
 
     // Add some odd characters
@@ -62,7 +62,7 @@ public class KerberosIdentityDataFileTest {
         "principal", "principal_type", "keytabFilePath",
         "'keytabFileOwnerName'", "<keytabFileOwnerAccess>",
         "\"keytabFileGroupName\"", "keytab,File,Group,Access",
-        "false");
+        "false", "false");
 
     writer.close();
     Assert.assertTrue(writer.isClosed());
@@ -153,7 +153,7 @@ public class KerberosIdentityDataFileTest {
         "principal", "principal_type", "keytabFilePath",
         "keytabFileOwnerName", "keytabFileOwnerAccess",
         "keytabFileGroupName", "keytabFileGroupAccess",
-        "true");
+        "true", "false");
 
     writer.close();
     Assert.assertTrue(writer.isClosed());
@@ -179,7 +179,7 @@ public class KerberosIdentityDataFileTest {
         "principal", "principal_type", "keytabFilePath",
         "keytabFileOwnerName", "keytabFileOwnerAccess",
         "keytabFileGroupName", "keytabFileGroupAccess",
-        "true");
+        "true", "false");
 
     writer.close();
     Assert.assertTrue(writer.isClosed());
@@ -205,4 +205,4 @@ public class KerberosIdentityDataFileTest {
     Assert.assertEquals(0, i);
 
   }
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/KerberosServerActionTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/KerberosServerActionTest.java b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/KerberosServerActionTest.java
index f63e6b8..a43db4d 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/KerberosServerActionTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/KerberosServerActionTest.java
@@ -120,7 +120,7 @@ public class KerberosServerActionTest {
           "principal|_HOST|_REALM" + i, "principal_type", "keytabFilePath" + i,
           "keytabFileOwnerName" + i, "keytabFileOwnerAccess" + i,
           "keytabFileGroupName" + i, "keytabFileGroupAccess" + i,
-          "false");
+          "false", "false");
     }
     writer.close();
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/ComponentVersionCheckActionTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/ComponentVersionCheckActionTest.java b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/ComponentVersionCheckActionTest.java
index d07ac15..9f87312 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/ComponentVersionCheckActionTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/ComponentVersionCheckActionTest.java
@@ -36,7 +36,6 @@ import org.apache.ambari.server.actionmanager.HostRoleCommandFactory;
 import org.apache.ambari.server.actionmanager.HostRoleStatus;
 import org.apache.ambari.server.agent.CommandReport;
 import org.apache.ambari.server.agent.ExecutionCommand;
-import org.apache.ambari.server.api.services.AmbariMetaInfo;
 import org.apache.ambari.server.orm.GuiceJpaInitializer;
 import org.apache.ambari.server.orm.InMemoryDefaultTestModule;
 import org.apache.ambari.server.orm.OrmTestHelper;
@@ -56,7 +55,6 @@ import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.Clusters;
 import org.apache.ambari.server.state.ConfigFactory;
 import org.apache.ambari.server.state.Host;
-import org.apache.ambari.server.state.RepositoryInfo;
 import org.apache.ambari.server.state.RepositoryVersionState;
 import org.apache.ambari.server.state.Service;
 import org.apache.ambari.server.state.ServiceComponent;
@@ -308,12 +306,6 @@ public class ComponentVersionCheckActionTest {
 
     makeUpgradeCluster(sourceStack, sourceRepo, targetStack, targetRepo);
 
-    // Verify the repo before calling Finalize
-    AmbariMetaInfo metaInfo = m_injector.getInstance(AmbariMetaInfo.class);
-
-    RepositoryInfo repo = metaInfo.getRepository(sourceStack.getStackName(), sourceStack.getStackVersion(), "redhat6", sourceStack.getStackId());
-    assertEquals(HDP_211_CENTOS6_REPO_URL, repo.getBaseUrl());
-
     // Finalize the upgrade
     Map<String, String> commandParams = new HashMap<>();
     ExecutionCommand executionCommand = new ExecutionCommand();
@@ -470,12 +462,6 @@ public class ComponentVersionCheckActionTest {
     sch = createNewServiceComponentHost(cluster, "ZOOKEEPER", "ZOOKEEPER_SERVER", "h1");
     sch.setVersion(HDP_2_1_1_1);
 
-    // Verify the repo before calling Finalize
-    AmbariMetaInfo metaInfo = m_injector.getInstance(AmbariMetaInfo.class);
-
-    RepositoryInfo repo = metaInfo.getRepository(sourceStack.getStackName(), sourceStack.getStackVersion(), "redhat6", sourceStack.getStackId());
-    assertEquals(HDP_211_CENTOS6_REPO_URL, repo.getBaseUrl());
-
     // Finalize the upgrade
     Map<String, String> commandParams = new HashMap<>();
     ExecutionCommand executionCommand = new ExecutionCommand();

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/RangerUsersyncConfigCalculationTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/RangerUsersyncConfigCalculationTest.java b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/RangerUsersyncConfigCalculationTest.java
new file mode 100644
index 0000000..61ca682
--- /dev/null
+++ b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/RangerUsersyncConfigCalculationTest.java
@@ -0,0 +1,126 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.server.serveraction.upgrades;
+
+import static org.easymock.EasyMock.anyObject;
+import static org.easymock.EasyMock.createMock;
+import static org.easymock.EasyMock.expect;
+import static org.easymock.EasyMock.expectLastCall;
+import static org.easymock.EasyMock.replay;
+
+import java.lang.reflect.Field;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.ambari.server.actionmanager.ExecutionCommandWrapper;
+import org.apache.ambari.server.actionmanager.HostRoleCommand;
+import org.apache.ambari.server.agent.CommandReport;
+import org.apache.ambari.server.agent.ExecutionCommand;
+import org.apache.ambari.server.state.Cluster;
+import org.apache.ambari.server.state.Clusters;
+import org.apache.ambari.server.state.Config;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+
+import com.google.inject.Injector;
+
+public class RangerUsersyncConfigCalculationTest {
+
+  private Injector m_injector;
+  private Clusters m_clusters;
+  private Field m_clusterField;
+
+  @Before
+  public void setup() throws Exception {
+    m_injector = createMock(Injector.class);
+    m_clusters = createMock(Clusters.class);
+    Cluster cluster = createMock(Cluster.class);
+
+    Map<String, String> mockRangerUsersyncProperties = new HashMap<String, String>() {
+      {
+        put("ranger.usersync.ldap.grouphierarchylevels", "2");
+      }
+    };
+
+    Map<String, String> mockRangerEnvProperties = new HashMap<String, String>();
+
+    Config rangerUsersyncConfig = createMock(Config.class);
+    expect(rangerUsersyncConfig.getType()).andReturn("ranger-ugsync-site").anyTimes();
+    expect(rangerUsersyncConfig.getProperties()).andReturn(mockRangerUsersyncProperties).anyTimes();
+
+    Config rangerEnvConfig = createMock(Config.class);
+    expect(rangerEnvConfig.getType()).andReturn("ranger-env").anyTimes();
+    expect(rangerEnvConfig.getProperties()).andReturn(mockRangerEnvProperties).anyTimes();
+
+    rangerEnvConfig.setProperties(anyObject(Map.class));
+    expectLastCall().atLeastOnce();
+
+    rangerEnvConfig.save();
+    expectLastCall().atLeastOnce();
+
+    expect(cluster.getDesiredConfigByType("ranger-ugsync-site")).andReturn(rangerUsersyncConfig).atLeastOnce();
+    expect(cluster.getDesiredConfigByType("ranger-env")).andReturn(rangerEnvConfig).atLeastOnce();
+    expect(m_clusters.getCluster((String) anyObject())).andReturn(cluster).anyTimes();
+    expect(m_injector.getInstance(Clusters.class)).andReturn(m_clusters).atLeastOnce();
+
+    replay(m_injector, m_clusters, cluster, rangerUsersyncConfig, rangerEnvConfig);
+
+    m_clusterField = RangerUsersyncConfigCalculation.class.getDeclaredField("m_clusters");
+    m_clusterField.setAccessible(true);
+
+  }
+
+  @Test
+  public void testAction() throws Exception {
+
+    Map<String, String> commandParams = new HashMap<String, String>();
+    commandParams.put("clusterName", "cl1");
+
+    ExecutionCommand executionCommand = new ExecutionCommand();
+    executionCommand.setCommandParams(commandParams);
+    executionCommand.setClusterName("cl1");
+
+    HostRoleCommand hrc = createMock(HostRoleCommand.class);
+    expect(hrc.getRequestId()).andReturn(1L).anyTimes();
+    expect(hrc.getStageId()).andReturn(2L).anyTimes();
+    expect(hrc.getExecutionCommandWrapper()).andReturn(new ExecutionCommandWrapper(executionCommand)).anyTimes();
+    replay(hrc);
+
+    RangerUsersyncConfigCalculation action = new RangerUsersyncConfigCalculation();
+    m_clusterField.set(action, m_clusters);
+
+    action.setExecutionCommand(executionCommand);
+    action.setHostRoleCommand(hrc);
+
+    CommandReport report = action.execute(null);
+    Assert.assertNotNull(report);
+
+    Cluster cl = m_clusters.getCluster("cl1");
+    Config config = cl.getDesiredConfigByType("ranger-env");
+
+    Map<String, String> map = config.getProperties();
+
+    Assert.assertTrue(map.containsKey("is_nested_groupsync_enabled"));
+    Assert.assertEquals("true", map.get("is_nested_groupsync_enabled"));
+
+    report = action.execute(null);
+    Assert.assertNotNull(report);
+
+  }
+}
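
The setup and assertions pin down the behavior under test: when ranger-ugsync-site carries a ranger.usersync.ldap.grouphierarchylevels value greater than zero, the action writes is_nested_groupsync_enabled=true into ranger-env and saves it. A sketch of that inferred rule (a reading of the test, not the RangerUsersyncConfigCalculation source itself):

    import java.util.HashMap;
    import java.util.Map;

    public class NestedGroupSyncRuleSketch {

      // Inferred rule: enable nested group sync when the LDAP group
      // hierarchy depth is greater than zero.
      static Map<String, String> apply(Map<String, String> ugsyncSite, Map<String, String> rangerEnv) {
        int levels = Integer.parseInt(
            ugsyncSite.getOrDefault("ranger.usersync.ldap.grouphierarchylevels", "0"));
        Map<String, String> updated = new HashMap<>(rangerEnv);
        if (levels > 0) {
          updated.put("is_nested_groupsync_enabled", "true");
        }
        return updated;
      }

      public static void main(String[] args) {
        Map<String, String> ugsync = new HashMap<>();
        ugsync.put("ranger.usersync.ldap.grouphierarchylevels", "2");
        System.out.println(apply(ugsync, new HashMap<>()));
        // -> {is_nested_groupsync_enabled=true}
      }
    }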

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/test/java/org/apache/ambari/server/stack/KerberosDescriptorTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/stack/KerberosDescriptorTest.java b/ambari-server/src/test/java/org/apache/ambari/server/stack/KerberosDescriptorTest.java
index 0f1dd55..d208cfc 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/stack/KerberosDescriptorTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/stack/KerberosDescriptorTest.java
@@ -19,147 +19,113 @@
 package org.apache.ambari.server.stack;
 
 import java.io.File;
-import java.io.IOException;
+import java.io.InputStream;
 import java.net.URL;
+import java.util.Set;
+import java.util.regex.Pattern;
 
-import org.apache.ambari.server.state.kerberos.KerberosDescriptor;
-import org.apache.ambari.server.state.kerberos.KerberosDescriptorFactory;
 import org.junit.BeforeClass;
-import org.junit.Ignore;
 import org.junit.Test;
-import org.springframework.util.Assert;
+import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.networknt.schema.JsonSchema;
+import com.networknt.schema.JsonSchemaFactory;
+import com.networknt.schema.ValidationMessage;
+
+import junit.framework.Assert;
 
 /**
  * KerberosDescriptorTest tests the stack- and service-level descriptors for certain stacks
  * and services
  */
-@Ignore
+@Category({category.KerberosTest.class})
 public class KerberosDescriptorTest {
-  private static final KerberosDescriptorFactory KERBEROS_DESCRIPTOR_FACTORY = new KerberosDescriptorFactory();
+  private static Logger LOG = LoggerFactory.getLogger(KerberosDescriptorTest.class);
+
+  private static final Pattern PATTERN_KERBEROS_DESCRIPTOR_FILENAME = Pattern.compile("^kerberos(?:_preconfigure)?\\.json$");
 
   private static File stacksDirectory;
-  private static File hdpStackDirectory;
-  private static File hdp22StackDirectory;
-  private static File hdp22ServicesDirectory;
   private static File commonServicesDirectory;
 
   @BeforeClass
   public static void beforeClass() {
     URL rootDirectoryURL = KerberosDescriptorTest.class.getResource("/");
-    Assert.notNull(rootDirectoryURL);
+    Assert.assertNotNull(rootDirectoryURL);
 
     File resourcesDirectory = new File(new File(rootDirectoryURL.getFile()).getParentFile().getParentFile(), "src/main/resources");
-    Assert.notNull(resourcesDirectory);
-    Assert.isTrue(resourcesDirectory.canRead());
+    Assert.assertNotNull(resourcesDirectory);
+    Assert.assertTrue(resourcesDirectory.canRead());
 
     stacksDirectory = new File(resourcesDirectory, "stacks");
-    Assert.notNull(stacksDirectory);
-    Assert.isTrue(stacksDirectory.canRead());
-
-    hdpStackDirectory = new File(stacksDirectory, "HDP");
-    Assert.notNull(hdpStackDirectory);
-    Assert.isTrue(hdpStackDirectory.canRead());
-
-    hdp22StackDirectory = new File(hdpStackDirectory, "2.2");
-    Assert.notNull(hdp22StackDirectory);
-    Assert.isTrue(hdp22StackDirectory.canRead());
-
-    hdp22ServicesDirectory = new File(hdp22StackDirectory, "services");
-    Assert.notNull(hdp22ServicesDirectory);
-    Assert.isTrue(hdp22ServicesDirectory.canRead());
+    Assert.assertNotNull(stacksDirectory);
+    Assert.assertTrue(stacksDirectory.canRead());
 
     commonServicesDirectory = new File(resourcesDirectory, "common-services");
-    Assert.notNull(commonServicesDirectory);
-    Assert.isTrue(commonServicesDirectory.canRead());
+    Assert.assertNotNull(commonServicesDirectory);
+    Assert.assertTrue(commonServicesDirectory.canRead());
 
   }
 
   @Test
-  public void testCommonHBASEServiceDescriptor() throws IOException {
-    KerberosDescriptor descriptor = getKerberosDescriptor(commonServicesDirectory, "HBASE", "0.96.0.2.0");
-    Assert.notNull(descriptor);
-    Assert.notNull(descriptor.getServices());
-    Assert.notNull(descriptor.getService("HBASE"));
+  public void testCommonServiceDescriptor() throws Exception {
+    JsonSchema schema = getJsonSchemaFromPath("kerberos_descriptor_schema.json");
+    Assert.assertTrue(visitFile(schema, commonServicesDirectory, true));
   }
 
   @Test
-  public void testCommonHDFSServiceDescriptor() throws IOException {
-    KerberosDescriptor descriptor = getKerberosDescriptor(commonServicesDirectory, "HDFS", "2.1.0.2.0");
-    Assert.notNull(descriptor);
-    Assert.notNull(descriptor.getServices());
-    Assert.notNull(descriptor.getService("HDFS"));
+  public void testStackServiceDescriptor() throws Exception {
+    JsonSchema schema = getJsonSchemaFromPath("kerberos_descriptor_schema.json");
+    Assert.assertTrue(visitFile(schema, stacksDirectory, true));
   }
 
-  @Test
-  public void testCommonYarnServiceDescriptor() throws IOException {
-    KerberosDescriptor descriptor = getKerberosDescriptor(commonServicesDirectory, "YARN", "2.1.0.2.0");
-    Assert.notNull(descriptor);
-    Assert.notNull(descriptor.getServices());
-    Assert.notNull(descriptor.getService("YARN"));
-    Assert.notNull(descriptor.getService("MAPREDUCE2"));
-  }
+  private boolean visitFile(JsonSchema schema, File file, boolean previousResult) throws Exception {
 
-  @Test
-  public void testCommonFalconServiceDescriptor() throws IOException {
-    KerberosDescriptor descriptor = getKerberosDescriptor(commonServicesDirectory, "FALCON", "0.5.0.2.1");
-    Assert.notNull(descriptor);
-    Assert.notNull(descriptor.getServices());
-    Assert.notNull(descriptor.getService("FALCON"));
-  }
+    if (file.isDirectory()) {
+      boolean currentResult = true;
 
-  @Test
-  public void testCommonHiveServiceDescriptor() throws IOException {
-    KerberosDescriptor descriptor = getKerberosDescriptor(commonServicesDirectory, "HIVE", "0.12.0.2.0");
-    Assert.notNull(descriptor);
-    Assert.notNull(descriptor.getServices());
-    Assert.notNull(descriptor.getService("HIVE"));
-  }
+      File[] files = file.listFiles();
+      if (files != null) {
+        for (File currentFile : files) {
+          currentResult = visitFile(schema, currentFile, previousResult) && currentResult;
+        }
+      }
+      return previousResult && currentResult;
+    } else if (file.isFile()) {
+      if (PATTERN_KERBEROS_DESCRIPTOR_FILENAME.matcher(file.getName()).matches()) {
+        LOG.info("Validating " + file.getAbsolutePath());
 
-  @Test
-  public void testCommonKnoxServiceDescriptor() throws IOException {
-    KerberosDescriptor descriptor = getKerberosDescriptor(commonServicesDirectory, "KNOX", "0.5.0.2.2");
-    Assert.notNull(descriptor);
-    Assert.notNull(descriptor.getServices());
-    Assert.notNull(descriptor.getService("KNOX"));
-  }
+        JsonNode node = getJsonNodeFromUrl(file.toURI().toURL().toExternalForm());
+        Set<ValidationMessage> errors = schema.validate(node);
 
-  @Test
-  public void testCommonOozieServiceDescriptor() throws IOException {
-    KerberosDescriptor descriptor;
+        if ((errors != null) && !errors.isEmpty()) {
+          for (ValidationMessage message : errors) {
+            LOG.error(message.getMessage());
+          }
 
-    descriptor = getKerberosDescriptor(commonServicesDirectory, "OOZIE", "4.0.0.2.0");
-    Assert.notNull(descriptor);
-    Assert.notNull(descriptor.getServices());
-    Assert.notNull(descriptor.getService("OOZIE"));
-  }
+          return false;
+        }
 
-  @Test
-  public void testCommonStormServiceDescriptor() throws IOException {
-    KerberosDescriptor descriptor = getKerberosDescriptor(commonServicesDirectory, "STORM", "0.9.1");
-    Assert.notNull(descriptor);
-    Assert.notNull(descriptor.getServices());
-    Assert.notNull(descriptor.getService("STORM"));
-  }
+        return true;
+      } else {
+        return true;
+      }
+    }
 
-  @Test
-  public void testCommonZookeepeerServiceDescriptor() throws IOException {
-    KerberosDescriptor descriptor = getKerberosDescriptor(commonServicesDirectory, "ZOOKEEPER", "3.4.5");
-    Assert.notNull(descriptor);
-    Assert.notNull(descriptor.getServices());
-    Assert.notNull(descriptor.getService("ZOOKEEPER"));
+    return previousResult;
   }
 
-  @Test
-  public void testCommonSparkServiceDescriptor() throws IOException {
-    KerberosDescriptor descriptor = getKerberosDescriptor(commonServicesDirectory, "SPARK", "1.2.1");
-    Assert.notNull(descriptor);
-    Assert.notNull(descriptor.getServices());
-    Assert.notNull(descriptor.getService("SPARK"));
+  private JsonNode getJsonNodeFromUrl(String url) throws Exception {
+    ObjectMapper mapper = new ObjectMapper();
+    return mapper.readTree(new URL(url));
   }
 
-  private KerberosDescriptor getKerberosDescriptor(File baseDirectory, String service, String version) throws IOException {
-    File serviceDirectory = new File(baseDirectory, service);
-    File serviceVersionDirectory = new File(serviceDirectory, version);
-    return KERBEROS_DESCRIPTOR_FACTORY.createInstance(new File(serviceVersionDirectory, "kerberos.json"));
+  private JsonSchema getJsonSchemaFromPath(String name) throws Exception {
+    JsonSchemaFactory factory = new JsonSchemaFactory();
+    InputStream is = Thread.currentThread().getContextClassLoader().getResourceAsStream(name);
+    return factory.getSchema(is);
   }
 }
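
The rewritten test trades per-service spot checks for blanket validation: every kerberos.json or kerberos_preconfigure.json found under stacks/ and common-services/ is validated against kerberos_descriptor_schema.json with the networknt validator. The core of that API in isolation, mirroring the calls used above (the inline schema and document are trivial placeholders):

    import java.io.ByteArrayInputStream;
    import java.io.InputStream;
    import java.nio.charset.StandardCharsets;
    import java.util.Set;

    import com.fasterxml.jackson.databind.JsonNode;
    import com.fasterxml.jackson.databind.ObjectMapper;
    import com.networknt.schema.JsonSchema;
    import com.networknt.schema.JsonSchemaFactory;
    import com.networknt.schema.ValidationMessage;

    public class SchemaValidationSketch {
      public static void main(String[] args) throws Exception {
        String schemaJson = "{\"type\":\"object\"}";      // placeholder schema
        String document = "{\"services\":[]}";            // placeholder descriptor

        InputStream schemaStream =
            new ByteArrayInputStream(schemaJson.getBytes(StandardCharsets.UTF_8));
        JsonSchema schema = new JsonSchemaFactory().getSchema(schemaStream);

        JsonNode node = new ObjectMapper().readTree(document);
        Set<ValidationMessage> errors = schema.validate(node);

        // An empty set means the document conforms, as visitFile() expects.
        System.out.println(errors.isEmpty() ? "valid" : errors);
      }
    }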

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/test/java/org/apache/ambari/server/stack/StackServiceDirectoryTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/stack/StackServiceDirectoryTest.java b/ambari-server/src/test/java/org/apache/ambari/server/stack/StackServiceDirectoryTest.java
new file mode 100644
index 0000000..5983dce
--- /dev/null
+++ b/ambari-server/src/test/java/org/apache/ambari/server/stack/StackServiceDirectoryTest.java
@@ -0,0 +1,76 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.stack;
+
+import static org.junit.Assert.assertEquals;
+
+import java.io.File;
+
+import org.apache.ambari.server.AmbariException;
+
+import org.junit.Test;
+
+/**
+ * Tests for StackServiceDirectory
+ */
+public class StackServiceDirectoryTest {
+
+  private MockStackServiceDirectory createStackServiceDirectory(String servicePath) throws AmbariException {
+    MockStackServiceDirectory ssd = new MockStackServiceDirectory(servicePath);
+    return ssd;
+  }
+
+  @Test
+  public void testValidServiceAdvisorClassName() throws Exception {
+    String pathWithInvalidChars = "/Fake-Stack.Name/1.0/services/FAKESERVICE/";
+    String serviceNameValidChars = "FakeService";
+
+    String pathWithValidChars = "/FakeStackName/1.0/services/FAKESERVICE/";
+    String serviceNameInvalidChars = "Fake-Serv.ice";
+
+    String desiredServiceAdvisorName = "FakeStackName10FakeServiceServiceAdvisor";
+
+    MockStackServiceDirectory ssd1 = createStackServiceDirectory(pathWithInvalidChars);
+    assertEquals(desiredServiceAdvisorName, ssd1.getAdvisorName(serviceNameValidChars));
+
+    MockStackServiceDirectory ssd2 = createStackServiceDirectory(pathWithValidChars);
+    assertEquals(desiredServiceAdvisorName, ssd2.getAdvisorName(serviceNameInvalidChars));
+
+    MockStackServiceDirectory ssd3 = createStackServiceDirectory(pathWithInvalidChars);
+    assertEquals(desiredServiceAdvisorName, ssd3.getAdvisorName(serviceNameInvalidChars));
+
+    MockStackServiceDirectory ssd4 = createStackServiceDirectory(pathWithValidChars);
+    assertEquals(desiredServiceAdvisorName, ssd4.getAdvisorName(serviceNameValidChars));
+  }
+
+  private class MockStackServiceDirectory extends StackServiceDirectory {
+    File advisor = null;
+
+    MockStackServiceDirectory (String servicePath) throws AmbariException {
+      super(servicePath);
+      advisor = new File(servicePath, StackDirectory.SERVICE_ADVISOR_FILE_NAME);
+    }
+
+    protected void parsePath() {}
+
+    public File getAdvisorFile() {
+      return advisor;
+    }
+  }
+}
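
All four assertions expect the identical advisor class name no matter which input carries the illegal characters, which implies the implementation strips everything outside [A-Za-z0-9] from the stack name, stack version, and service name before concatenating. A sketch of that inferred sanitization (not the StackServiceDirectory source):

    public class AdvisorNameSketch {

      // Inferred rule: drop non-alphanumerics, then compose
      // <stackName><stackVersion><serviceName>ServiceAdvisor.
      static String advisorName(String stackName, String stackVersion, String serviceName) {
        String clean = (stackName + stackVersion + serviceName).replaceAll("[^a-zA-Z0-9]", "");
        return clean + "ServiceAdvisor";
      }

      public static void main(String[] args) {
        // Both problem inputs from the test collapse to the same class name.
        System.out.println(advisorName("Fake-Stack.Name", "1.0", "FakeService"));
        System.out.println(advisorName("FakeStackName", "1.0", "Fake-Serv.ice"));
        // -> FakeStackName10FakeServiceServiceAdvisor (twice)
      }
    }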

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/test/java/org/apache/ambari/server/state/ServiceComponentTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/ServiceComponentTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/ServiceComponentTest.java
index d341a09..0cc43ba 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/ServiceComponentTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/ServiceComponentTest.java
@@ -274,15 +274,28 @@ public class ServiceComponentTest {
     service.addServiceComponent(component);
 
     addHostToCluster("h1", service.getCluster().getClusterName());
+    addHostToCluster("h2", service.getCluster().getClusterName());
+    addHostToCluster("h3", service.getCluster().getClusterName());
     ServiceComponentHost sch =
       serviceComponentHostFactory.createNew(component, "h1");
+    ServiceComponentHost sch2 =
+      serviceComponentHostFactory.createNew(component, "h2");
+    ServiceComponentHost sch3 =
+      serviceComponentHostFactory.createNew(component, "h3");
     sch.setState(State.INSTALLED);
+    sch2.setState(State.INSTALLED);
+    sch3.setState(State.INSTALLED);
 
     Map<String, ServiceComponentHost> compHosts =
       new HashMap<>();
     compHosts.put("h1", sch);
+    compHosts.put("h2", sch2);
+    compHosts.put("h3", sch3);
     component.addServiceComponentHosts(compHosts);
-    Assert.assertEquals(1, component.getServiceComponentHosts().size());
+    Assert.assertEquals(3, component.getServiceComponentHosts().size());
+
+    component.getServiceComponentHost("h2").setMaintenanceState(MaintenanceState.ON);
+    sch3.setMaintenanceState(MaintenanceState.ON);
 
     ServiceComponent sc = service.getServiceComponent(componentName);
     Assert.assertNotNull(sc);
@@ -299,9 +312,11 @@ public class ServiceComponentTest {
     int totalCount = r.getServiceComponentStateCount().get("totalCount");
     int startedCount = r.getServiceComponentStateCount().get("startedCount");
     int installedCount = r.getServiceComponentStateCount().get("installedCount");
-    Assert.assertEquals(1, totalCount);
+    int installedAndMaintenanceOffCount = r.getServiceComponentStateCount().get("installedAndMaintenanceOffCount");
+    Assert.assertEquals(3, totalCount);
     Assert.assertEquals(0, startedCount);
-    Assert.assertEquals(1, installedCount);
+    Assert.assertEquals(3, installedCount);
+    Assert.assertEquals(1, installedAndMaintenanceOffCount);
 
     // TODO check configs
     // r.getConfigVersions()
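
The updated expectations encode the counting rule directly: h1, h2, and h3 are all INSTALLED (totalCount and installedCount are 3), but h2 and h3 are in maintenance mode, so only h1 contributes to the new installedAndMaintenanceOffCount. As a predicate, the rule the assertions imply is (a sketch, not the production counter):

    // Inferred membership test for installedAndMaintenanceOffCount, using the
    // State and MaintenanceState enums already imported by this test.
    static boolean countsAsInstalledAndMaintenanceOff(State state, MaintenanceState maintenance) {
      return state == State.INSTALLED && maintenance == MaintenanceState.OFF;
    }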


[2/7] ambari git commit: AMBARI-22190. After merging trunk to branch-3.0-perf some parts of code are missing. (mpapirkovskyy)

Posted by mp...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/test/java/org/apache/ambari/server/state/UpgradeHelperTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/UpgradeHelperTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/UpgradeHelperTest.java
index b875db6..4eb213b 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/UpgradeHelperTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/UpgradeHelperTest.java
@@ -2537,6 +2537,50 @@ public class UpgradeHelperTest extends EasyMockSupport {
   }
 
   @Test
+  public void testSequentialServiceChecksWithServiceCheckFailure() throws Exception {
+    Map<String, UpgradePack> upgrades = ambariMetaInfo.getUpgradePacks("HDP", "2.1.1");
+    assertTrue(upgrades.containsKey("upgrade_test_checks"));
+    UpgradePack upgrade = upgrades.get("upgrade_test_checks");
+    assertNotNull(upgrade);
+
+    // !!! fake skippable so we don't affect other tests
+    for (Grouping g : upgrade.getAllGroups()) {
+      if (g.name.equals("SERVICE_CHECK_1") || g.name.equals("SERVICE_CHECK_2")) {
+        g.skippable = true;
+      }
+    }
+
+    Cluster cluster = makeCluster();
+    cluster.deleteService("HDFS", new DeleteHostComponentStatusMetaData());
+    cluster.deleteService("YARN", new DeleteHostComponentStatusMetaData());
+
+    UpgradeContext context = getMockUpgradeContext(cluster, Direction.UPGRADE, UpgradeType.ROLLING, repositoryVersion2110,
+        RepositoryType.STANDARD, cluster.getServices().keySet(), m_masterHostResolver, false);
+    expect(context.isServiceCheckFailureAutoSkipped()).andReturn(Boolean.TRUE).atLeastOnce();
+
+    replay(context);
+
+    List<UpgradeGroupHolder> groups = m_upgradeHelper.createSequence(upgrade, context);
+    assertEquals(5, groups.size());
+
+    UpgradeGroupHolder serviceCheckGroup = groups.get(2);
+    assertEquals(ServiceCheckGrouping.class, serviceCheckGroup.groupClass);
+    assertEquals(4, serviceCheckGroup.items.size());
+
+    StageWrapper wrapper = serviceCheckGroup.items.get(0);
+    assertEquals(ServiceCheckGrouping.ServiceCheckStageWrapper.class, wrapper.getClass());
+    assertTrue(wrapper.getText().contains("ZooKeeper"));
+
+    wrapper = serviceCheckGroup.items.get(serviceCheckGroup.items.size()-1);
+    assertTrue(wrapper.getText().equals("Verifying Skipped Failures"));
+
+    // Do stacks cleanup
+    stackManagerMock.invalidateCurrentPaths();
+    ambariMetaInfo.init();
+  }
+
+
+  @Test
   public void testPrematureServiceChecks() throws Exception {
     Map<String, UpgradePack> upgrades = ambariMetaInfo.getUpgradePacks("HDP", "2.1.1");
     assertTrue(upgrades.containsKey("upgrade_test_checks"));
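
The new test removes HDFS and YARN outright, marks the two service-check groups skippable, and then asserts two structural facts about the generated sequence: the ZooKeeper check leads the service-check group, and, because isServiceCheckFailureAutoSkipped() returns true, a trailing "Verifying Skipped Failures" wrapper closes it. That final check can be written a little more directly with the same identifiers:

    // Equivalent, slightly more direct form of the closing wrapper assertion.
    StageWrapper last = serviceCheckGroup.items.get(serviceCheckGroup.items.size() - 1);
    assertEquals("Verifying Skipped Failures", last.getText());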

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosComponentDescriptorTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosComponentDescriptorTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosComponentDescriptorTest.java
index 09699c6..cbd146b 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosComponentDescriptorTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosComponentDescriptorTest.java
@@ -35,9 +35,9 @@ import com.google.gson.reflect.TypeToken;
 
 import junit.framework.Assert;
 
-@Category({ category.KerberosTest.class})
+@Category({category.KerberosTest.class})
 public class KerberosComponentDescriptorTest {
-  public static final String JSON_VALUE =
+  static final String JSON_VALUE =
       " {" +
           "  \"name\": \"COMPONENT_NAME\"," +
           "  \"identities\": [" +
@@ -60,9 +60,9 @@ public class KerberosComponentDescriptorTest {
 
   static {
     Map<String, Object> identitiesMap = new TreeMap<>();
-    identitiesMap.put((String) KerberosIdentityDescriptorTest.MAP_VALUE.get("name"), KerberosIdentityDescriptorTest.MAP_VALUE);
-    identitiesMap.put((String) KerberosIdentityDescriptorTest.MAP_VALUE_ALT.get("name"), KerberosIdentityDescriptorTest.MAP_VALUE_ALT);
-    identitiesMap.put((String) KerberosIdentityDescriptorTest.MAP_VALUE_REFERENCE.get("name"), KerberosIdentityDescriptorTest.MAP_VALUE_REFERENCE);
+    identitiesMap.put((String) KerberosIdentityDescriptorTest.MAP_VALUE.get(KerberosIdentityDescriptor.KEY_NAME), KerberosIdentityDescriptorTest.MAP_VALUE);
+    identitiesMap.put((String) KerberosIdentityDescriptorTest.MAP_VALUE_ALT.get(KerberosIdentityDescriptor.KEY_NAME), KerberosIdentityDescriptorTest.MAP_VALUE_ALT);
+    identitiesMap.put((String) KerberosIdentityDescriptorTest.MAP_VALUE_REFERENCE.get(KerberosIdentityDescriptor.KEY_NAME), KerberosIdentityDescriptorTest.MAP_VALUE_REFERENCE);
 
     Map<String, Object> serviceSiteProperties = new TreeMap<>();
     serviceSiteProperties.put("service.component.property1", "red");
@@ -78,10 +78,10 @@ public class KerberosComponentDescriptorTest {
     authToLocalRules.add("component.name.rules2");
 
     MAP_VALUE = new TreeMap<>();
-    MAP_VALUE.put("name", "A_DIFFERENT_COMPONENT_NAME");
-    MAP_VALUE.put(AbstractKerberosDescriptor.Type.IDENTITY.getDescriptorPluralName(), new ArrayList<>(identitiesMap.values()));
-    MAP_VALUE.put(AbstractKerberosDescriptor.Type.CONFIGURATION.getDescriptorPluralName(), configurationsMap.values());
-    MAP_VALUE.put(AbstractKerberosDescriptor.Type.AUTH_TO_LOCAL_PROPERTY.getDescriptorPluralName(), authToLocalRules);
+    MAP_VALUE.put(KerberosIdentityDescriptor.KEY_NAME, "A_DIFFERENT_COMPONENT_NAME");
+    MAP_VALUE.put(KerberosComponentDescriptor.KEY_IDENTITIES, new ArrayList<>(identitiesMap.values()));
+    MAP_VALUE.put(KerberosComponentDescriptor.KEY_CONFIGURATIONS, configurationsMap.values());
+    MAP_VALUE.put(KerberosComponentDescriptor.KEY_AUTH_TO_LOCAL_PROPERTIES, authToLocalRules);
   }
 
   static void validateFromJSON(KerberosComponentDescriptor componentDescriptor) {
@@ -238,4 +238,4 @@ public class KerberosComponentDescriptorTest {
 
     validateUpdatedData(componentDescriptor);
   }
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosConfigurationDescriptorTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosConfigurationDescriptorTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosConfigurationDescriptorTest.java
index e891fde..afd6de2 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosConfigurationDescriptorTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosConfigurationDescriptorTest.java
@@ -32,7 +32,7 @@ import com.google.gson.reflect.TypeToken;
 
 import junit.framework.Assert;
 
-@Category({ category.KerberosTest.class})
+@Category({category.KerberosTest.class})
 public class KerberosConfigurationDescriptorTest {
   private static final String JSON_SINGLE_VALUE =
       "{ \"configuration-type\": {" +
@@ -243,4 +243,4 @@ public class KerberosConfigurationDescriptorTest {
     Assert.assertEquals("black", properties.get("property1"));
     Assert.assertEquals("white", properties.get("property2"));
   }
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosDescriptorTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosDescriptorTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosDescriptorTest.java
index 7fb5624..cc33512 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosDescriptorTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosDescriptorTest.java
@@ -40,12 +40,12 @@ import com.google.gson.Gson;
 
 import junit.framework.Assert;
 
-@Category({ category.KerberosTest.class})
+@Category({category.KerberosTest.class})
 public class KerberosDescriptorTest {
   private static final KerberosDescriptorFactory KERBEROS_DESCRIPTOR_FACTORY = new KerberosDescriptorFactory();
   private static final KerberosServiceDescriptorFactory KERBEROS_SERVICE_DESCRIPTOR_FACTORY = new KerberosServiceDescriptorFactory();
 
-  public static final String JSON_VALUE =
+  private static final String JSON_VALUE =
       "{" +
           "  \"properties\": {" +
           "      \"realm\": \"${cluster-env/kerberos_domain}\"," +
@@ -59,30 +59,30 @@ public class KerberosDescriptorTest {
           "    ]" +
           "}";
 
-  public static final Map<String, Object> MAP_VALUE;
+  private static final Map<String, Object> MAP_VALUE;
 
   static {
     Map<String, Object> keytabOwnerMap = new TreeMap<>();
-    keytabOwnerMap.put("name", "root");
-    keytabOwnerMap.put("access", "rw");
+    keytabOwnerMap.put(KerberosKeytabDescriptor.KEY_ACL_NAME, "root");
+    keytabOwnerMap.put(KerberosKeytabDescriptor.KEY_ACL_ACCESS, "rw");
 
     Map<String, Object> keytabGroupMap = new TreeMap<>();
-    keytabGroupMap.put("name", "hadoop");
-    keytabGroupMap.put("access", "r");
+    keytabGroupMap.put(KerberosKeytabDescriptor.KEY_ACL_NAME, "hadoop");
+    keytabGroupMap.put(KerberosKeytabDescriptor.KEY_ACL_ACCESS, "r");
 
     Map<String, Object> keytabMap = new TreeMap<>();
-    keytabMap.put("file", "/etc/security/keytabs/subject.service.keytab");
-    keytabMap.put("owner", keytabOwnerMap);
-    keytabMap.put("group", keytabGroupMap);
-    keytabMap.put("configuration", "service-site/service2.component.keytab.file");
+    keytabMap.put(KerberosKeytabDescriptor.KEY_FILE, "/etc/security/keytabs/subject.service.keytab");
+    keytabMap.put(KerberosKeytabDescriptor.KEY_OWNER, keytabOwnerMap);
+    keytabMap.put(KerberosKeytabDescriptor.KEY_GROUP, keytabGroupMap);
+    keytabMap.put(KerberosKeytabDescriptor.KEY_CONFIGURATION, "service-site/service2.component.keytab.file");
 
     Map<String, Object> sharedIdentityMap = new TreeMap<>();
-    sharedIdentityMap.put("name", "shared");
-    sharedIdentityMap.put("principal", KerberosPrincipalDescriptorTest.MAP_VALUE);
-    sharedIdentityMap.put("keytab", keytabMap);
+    sharedIdentityMap.put(KerberosIdentityDescriptor.KEY_NAME, "shared");
+    sharedIdentityMap.put(KerberosIdentityDescriptor.KEY_PRINCIPAL, KerberosPrincipalDescriptorTest.MAP_VALUE);
+    sharedIdentityMap.put(KerberosIdentityDescriptor.KEY_KEYTAB, keytabMap);
 
     Map<String, Object> servicesMap = new TreeMap<>();
-    servicesMap.put((String) KerberosServiceDescriptorTest.MAP_VALUE.get("name"), KerberosServiceDescriptorTest.MAP_VALUE);
+    servicesMap.put((String) KerberosServiceDescriptorTest.MAP_VALUE.get(KerberosServiceDescriptor.KEY_NAME), KerberosServiceDescriptorTest.MAP_VALUE);
 
     Map<String, Object> identitiesMap = new TreeMap<>();
     identitiesMap.put("shared", sharedIdentityMap);
@@ -104,14 +104,14 @@ public class KerberosDescriptorTest {
     properties.put("some.property", "Hello World");
 
     MAP_VALUE = new TreeMap<>();
-    MAP_VALUE.put("properties", properties);
-    MAP_VALUE.put(AbstractKerberosDescriptor.Type.AUTH_TO_LOCAL_PROPERTY.getDescriptorPluralName(), authToLocalRules);
-    MAP_VALUE.put(AbstractKerberosDescriptor.Type.SERVICE.getDescriptorPluralName(), servicesMap.values());
-    MAP_VALUE.put(AbstractKerberosDescriptor.Type.CONFIGURATION.getDescriptorPluralName(), configurationsMap.values());
-    MAP_VALUE.put(AbstractKerberosDescriptor.Type.IDENTITY.getDescriptorPluralName(), identitiesMap.values());
+    MAP_VALUE.put(KerberosDescriptor.KEY_PROPERTIES, properties);
+    MAP_VALUE.put(KerberosDescriptor.KEY_AUTH_TO_LOCAL_PROPERTIES, authToLocalRules);
+    MAP_VALUE.put(KerberosDescriptor.KEY_SERVICES, servicesMap.values());
+    MAP_VALUE.put(KerberosDescriptor.KEY_CONFIGURATIONS, configurationsMap.values());
+    MAP_VALUE.put(KerberosDescriptor.KEY_IDENTITIES, identitiesMap.values());
   }
 
-  public static void validateFromJSON(KerberosDescriptor descriptor) {
+  private static void validateFromJSON(KerberosDescriptor descriptor) {
     Assert.assertNotNull(descriptor);
     Assert.assertTrue(descriptor.isContainer());
 
@@ -146,7 +146,7 @@ public class KerberosDescriptorTest {
     Assert.assertNull(configurations);
   }
 
-  public static void validateFromMap(KerberosDescriptor descriptor) throws AmbariException {
+  private static void validateFromMap(KerberosDescriptor descriptor) throws AmbariException {
     Assert.assertNotNull(descriptor);
     Assert.assertTrue(descriptor.isContainer());
 
@@ -219,7 +219,7 @@ public class KerberosDescriptorTest {
     Assert.assertEquals("red", configProperties.get("property1"));
   }
 
-  public void validateUpdatedData(KerberosDescriptor descriptor) {
+  private void validateUpdatedData(KerberosDescriptor descriptor) {
     Assert.assertNotNull(descriptor);
 
     Map<String, String> properties = descriptor.getProperties();
@@ -420,7 +420,7 @@ public class KerberosDescriptorTest {
 
   @Test
   public void testGetReferencedIdentityDescriptor_Recursive() throws IOException {
-    boolean identityFound = false;
+    boolean identityFound;
     List<KerberosIdentityDescriptor> identities;
 
     URL systemResourceURL = ClassLoader.getSystemResource("kerberos/test_get_referenced_identity_descriptor.json");
@@ -482,8 +482,8 @@ public class KerberosDescriptorTest {
   public void testFiltersOutIdentitiesBasedonInstalledServices() throws IOException {
     URL systemResourceURL = ClassLoader.getSystemResource("kerberos/test_filtering_identity_descriptor.json");
     KerberosComponentDescriptor componentDescriptor = KERBEROS_DESCRIPTOR_FACTORY.createInstance(new File(systemResourceURL.getFile()))
-      .getService("SERVICE1")
-      .getComponent("SERVICE1_COMPONENT1");
+        .getService("SERVICE1")
+        .getComponent("SERVICE1_COMPONENT1");
     List<KerberosIdentityDescriptor> identities = componentDescriptor.getIdentities(true, new HashedMap() {{
       put("services", Collections.emptySet());
     }});
@@ -502,4 +502,4 @@ public class KerberosDescriptorTest {
     Assert.assertEquals("service2_component1@${realm}", principalsPerComponent.get("SERVICE2/SERVICE2_COMPONENT1/service2_component1_identity"));
     Assert.assertEquals("service1@${realm}", principalsPerComponent.get("SERVICE1/service1_identity"));
   }
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosDescriptorUpdateHelperTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosDescriptorUpdateHelperTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosDescriptorUpdateHelperTest.java
index 44812de..5faf7d8 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosDescriptorUpdateHelperTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosDescriptorUpdateHelperTest.java
@@ -50,7 +50,7 @@ import com.google.inject.assistedinject.FactoryModuleBuilder;
 
 import junit.framework.Assert;
 
-@Category({ category.KerberosTest.class})
+@Category({category.KerberosTest.class})
 public class KerberosDescriptorUpdateHelperTest extends EasyMockSupport {
   private static final KerberosDescriptorFactory KERBEROS_DESCRIPTOR_FACTORY = new KerberosDescriptorFactory();
   private static final Gson GSON = new Gson();
@@ -2343,4 +2343,4 @@ public class KerberosDescriptorUpdateHelperTest extends EasyMockSupport {
                 "}\n").toMap()),
         GSON.toJson(newValue.toMap()));
   }
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosIdentityDescriptorTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosIdentityDescriptorTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosIdentityDescriptorTest.java
index d11962b..5c7075f 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosIdentityDescriptorTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosIdentityDescriptorTest.java
@@ -31,9 +31,9 @@ import com.google.gson.reflect.TypeToken;
 
 import junit.framework.Assert;
 
-@Category({ category.KerberosTest.class})
+@Category({category.KerberosTest.class})
 public class KerberosIdentityDescriptorTest {
-  public static final String JSON_VALUE =
+  static final String JSON_VALUE =
       "{" +
           "  \"name\": \"identity_1\"" +
           "," +
@@ -50,63 +50,58 @@ public class KerberosIdentityDescriptorTest {
 
   static {
     MAP_VALUE = new TreeMap<>();
-    MAP_VALUE.put("name", "identity_1");
-    MAP_VALUE.put("principal", KerberosPrincipalDescriptorTest.MAP_VALUE);
-    MAP_VALUE.put("keytab", KerberosKeytabDescriptorTest.MAP_VALUE);
-    MAP_VALUE.put("password", "secret");
+    MAP_VALUE.put(KerberosIdentityDescriptor.KEY_NAME, "identity_1");
+    MAP_VALUE.put(KerberosIdentityDescriptor.KEY_PRINCIPAL, KerberosPrincipalDescriptorTest.MAP_VALUE);
+    MAP_VALUE.put(KerberosIdentityDescriptor.KEY_KEYTAB, KerberosKeytabDescriptorTest.MAP_VALUE);
 
     MAP_VALUE_ALT = new TreeMap<>();
-    MAP_VALUE_ALT.put("name", "identity_2");
-    MAP_VALUE_ALT.put("principal", KerberosPrincipalDescriptorTest.MAP_VALUE);
-    MAP_VALUE_ALT.put("keytab", KerberosKeytabDescriptorTest.MAP_VALUE);
-    MAP_VALUE_ALT.put("password", "secret2");
+    MAP_VALUE_ALT.put(KerberosIdentityDescriptor.KEY_NAME, "identity_2");
+    MAP_VALUE_ALT.put(KerberosIdentityDescriptor.KEY_PRINCIPAL, KerberosPrincipalDescriptorTest.MAP_VALUE);
+    MAP_VALUE_ALT.put(KerberosIdentityDescriptor.KEY_KEYTAB, KerberosKeytabDescriptorTest.MAP_VALUE);
 
     TreeMap<String, Object> ownerMap = new TreeMap<>();
-    ownerMap.put("name", "me");
-    ownerMap.put("access", "rw");
+    ownerMap.put(KerberosKeytabDescriptor.KEY_ACL_NAME, "me");
+    ownerMap.put(KerberosKeytabDescriptor.KEY_ACL_ACCESS, "rw");
 
     TreeMap<String, Object> groupMap = new TreeMap<>();
-    groupMap.put("name", "nobody");
-    groupMap.put("access", "");
+    groupMap.put(KerberosKeytabDescriptor.KEY_ACL_NAME, "nobody");
+    groupMap.put(KerberosKeytabDescriptor.KEY_ACL_ACCESS, "");
 
 
     TreeMap<String, Object> keytabMap = new TreeMap<>();
-    keytabMap.put("file", "/home/user/me/subject.service.keytab");
-    keytabMap.put("owner", ownerMap);
-    keytabMap.put("group", groupMap);
-    keytabMap.put("configuration", "service-site/me.component.keytab.file");
+    keytabMap.put(KerberosKeytabDescriptor.KEY_FILE, "/home/user/me/subject.service.keytab");
+    keytabMap.put(KerberosKeytabDescriptor.KEY_OWNER, ownerMap);
+    keytabMap.put(KerberosKeytabDescriptor.KEY_GROUP, groupMap);
+    keytabMap.put(KerberosKeytabDescriptor.KEY_CONFIGURATION, "service-site/me.component.keytab.file");
 
     MAP_VALUE_REFERENCE = new TreeMap<>();
-    MAP_VALUE_REFERENCE.put("name", "shared_identity");
-    MAP_VALUE_REFERENCE.put("reference", "/shared");
-    MAP_VALUE_REFERENCE.put("keytab", keytabMap);
+    MAP_VALUE_REFERENCE.put(KerberosIdentityDescriptor.KEY_NAME, "shared_identity");
+    MAP_VALUE_REFERENCE.put(KerberosIdentityDescriptor.KEY_REFERENCE, "/shared");
+    MAP_VALUE_REFERENCE.put(KerberosIdentityDescriptor.KEY_KEYTAB, keytabMap);
   }
 
 
-  public static void validateFromJSON(KerberosIdentityDescriptor identityDescriptor) {
+  static void validateFromJSON(KerberosIdentityDescriptor identityDescriptor) {
     Assert.assertNotNull(identityDescriptor);
     Assert.assertFalse(identityDescriptor.isContainer());
 
     KerberosPrincipalDescriptorTest.validateFromJSON(identityDescriptor.getPrincipalDescriptor());
     KerberosKeytabDescriptorTest.validateFromJSON(identityDescriptor.getKeytabDescriptor());
-    Assert.assertNull(identityDescriptor.getPassword());
   }
 
-  public static void validateFromMap(KerberosIdentityDescriptor identityDescriptor) {
+  static void validateFromMap(KerberosIdentityDescriptor identityDescriptor) {
     Assert.assertNotNull(identityDescriptor);
     Assert.assertFalse(identityDescriptor.isContainer());
 
     KerberosPrincipalDescriptorTest.validateFromMap(identityDescriptor.getPrincipalDescriptor());
     KerberosKeytabDescriptorTest.validateFromMap(identityDescriptor.getKeytabDescriptor());
-    Assert.assertEquals("secret", identityDescriptor.getPassword());
   }
 
-  public static void validateUpdatedData(KerberosIdentityDescriptor identityDescriptor) {
+  static void validateUpdatedData(KerberosIdentityDescriptor identityDescriptor) {
     Assert.assertNotNull(identityDescriptor);
 
     KerberosPrincipalDescriptorTest.validateUpdatedData(identityDescriptor.getPrincipalDescriptor());
     KerberosKeytabDescriptorTest.validateUpdatedData(identityDescriptor.getKeytabDescriptor());
-    Assert.assertEquals("secret", identityDescriptor.getPassword());
   }
 
   private static KerberosIdentityDescriptor createFromJSON() {
@@ -167,4 +162,4 @@ public class KerberosIdentityDescriptorTest {
     context.put("services", new HashSet<>(Arrays.asList("NOT_HIVE", "HDFS", "ZOOKEEPER")));
     Assert.assertFalse(identityDescriptor.shouldInclude(context));
   }
-}
\ No newline at end of file
+}

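The rewrite above swaps bare string keys ("name", "principal", "keytab") for the descriptors' KEY_* constants and drops the removed password field. A minimal sketch of the constant-based style, assuming the Map-based constructor these tests use elsewhere; only the constants visible in the hunk are relied on:

import java.util.Map;
import java.util.TreeMap;

import org.apache.ambari.server.state.kerberos.KerberosIdentityDescriptor;

public class IdentityMapSketch {

  public static void main(String[] args) {
    // KEY_* constants keep the test data aligned with the descriptor's
    // parser; a renamed key breaks at compile time instead of silently
    // producing an empty descriptor.
    Map<String, Object> map = new TreeMap<>();
    map.put(KerberosIdentityDescriptor.KEY_NAME, "identity_1");
    map.put(KerberosIdentityDescriptor.KEY_REFERENCE, "/shared");

    KerberosIdentityDescriptor identity = new KerberosIdentityDescriptor(map);
    System.out.println(identity.getName() + " -> " + identity.getReference());
  }
}
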
http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosKeytabDescriptorTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosKeytabDescriptorTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosKeytabDescriptorTest.java
index 8cdb39e..8bb179d 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosKeytabDescriptorTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosKeytabDescriptorTest.java
@@ -29,9 +29,9 @@ import com.google.gson.reflect.TypeToken;
 
 import junit.framework.Assert;
 
-@Category({ category.KerberosTest.class})
+@Category({category.KerberosTest.class})
 public class KerberosKeytabDescriptorTest {
-  public static final String JSON_VALUE =
+  static final String JSON_VALUE =
       "{" +
           "  \"file\": \"/etc/security/keytabs/${host}/subject.service.keytab\"," +
           "  \"owner\": {" +
@@ -45,25 +45,25 @@ public class KerberosKeytabDescriptorTest {
           "  \"configuration\": \"service-site/service.component.keytab.file\"" +
           "}";
 
-  public static final Map<String, Object> MAP_VALUE;
+  static final Map<String, Object> MAP_VALUE;
 
   static {
     TreeMap<String, Object> ownerMap = new TreeMap<>();
-    ownerMap.put("name", "root");
-    ownerMap.put("access", "rw");
+    ownerMap.put(KerberosKeytabDescriptor.KEY_ACL_NAME, "root");
+    ownerMap.put(KerberosKeytabDescriptor.KEY_ACL_ACCESS, "rw");
 
     TreeMap<String, Object> groupMap = new TreeMap<>();
-    groupMap.put("name", "hadoop");
-    groupMap.put("access", "r");
+    groupMap.put(KerberosKeytabDescriptor.KEY_ACL_NAME, "hadoop");
+    groupMap.put(KerberosKeytabDescriptor.KEY_ACL_ACCESS, "r");
 
     MAP_VALUE = new TreeMap<>();
-    MAP_VALUE.put("file", "/etc/security/keytabs/subject.service.keytab");
-    MAP_VALUE.put("owner", ownerMap);
-    MAP_VALUE.put("group", groupMap);
-    MAP_VALUE.put("configuration", "service-site/service2.component.keytab.file");
+    MAP_VALUE.put(KerberosKeytabDescriptor.KEY_FILE, "/etc/security/keytabs/subject.service.keytab");
+    MAP_VALUE.put(KerberosKeytabDescriptor.KEY_OWNER, ownerMap);
+    MAP_VALUE.put(KerberosKeytabDescriptor.KEY_GROUP, groupMap);
+    MAP_VALUE.put(KerberosKeytabDescriptor.KEY_CONFIGURATION, "service-site/service2.component.keytab.file");
   }
 
-  public static void validateFromJSON(KerberosKeytabDescriptor keytabDescriptor) {
+  static void validateFromJSON(KerberosKeytabDescriptor keytabDescriptor) {
     Assert.assertNotNull(keytabDescriptor);
     Assert.assertFalse(keytabDescriptor.isContainer());
 
@@ -75,7 +75,7 @@ public class KerberosKeytabDescriptorTest {
     Assert.assertEquals("service-site/service.component.keytab.file", keytabDescriptor.getConfiguration());
   }
 
-  public static void validateFromMap(KerberosKeytabDescriptor keytabDescriptor) {
+  static void validateFromMap(KerberosKeytabDescriptor keytabDescriptor) {
     Assert.assertNotNull(keytabDescriptor);
     Assert.assertFalse(keytabDescriptor.isContainer());
 
@@ -87,7 +87,7 @@ public class KerberosKeytabDescriptorTest {
     Assert.assertEquals("service-site/service2.component.keytab.file", keytabDescriptor.getConfiguration());
   }
 
-  public static void validateUpdatedData(KerberosKeytabDescriptor keytabDescriptor) {
+  static void validateUpdatedData(KerberosKeytabDescriptor keytabDescriptor) {
     Assert.assertNotNull(keytabDescriptor);
 
     Assert.assertEquals("/etc/security/keytabs/subject.service.keytab", keytabDescriptor.getFile());
@@ -146,4 +146,4 @@ public class KerberosKeytabDescriptorTest {
 
     validateUpdatedData(keytabDescriptor);
   }
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosPrincipalDescriptorTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosPrincipalDescriptorTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosPrincipalDescriptorTest.java
index b84223f..0e30d44 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosPrincipalDescriptorTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosPrincipalDescriptorTest.java
@@ -29,9 +29,9 @@ import com.google.gson.reflect.TypeToken;
 
 import junit.framework.Assert;
 
-@Category({ category.KerberosTest.class})
+@Category({category.KerberosTest.class})
 public class KerberosPrincipalDescriptorTest {
-  public static final String JSON_VALUE =
+  static final String JSON_VALUE =
       "{" +
           "\"value\": \"service/_HOST@_REALM\"," +
           "\"configuration\": \"service-site/service.component.kerberos.principal\"," +
@@ -39,27 +39,27 @@ public class KerberosPrincipalDescriptorTest {
           "\"local_username\": \"localUser\"" +
           "}";
 
-  public static final String JSON_VALUE_SPARSE =
+  private static final String JSON_VALUE_SPARSE =
       "{" +
           "\"value\": \"serviceOther/_HOST@_REALM\"" +
           "}";
 
   public static final Map<String, Object> MAP_VALUE;
-  public static final Map<String, Object> MAP_VALUE_SPARSE;
+  private static final Map<String, Object> MAP_VALUE_SPARSE;
 
   static {
     MAP_VALUE = new TreeMap<>();
-    MAP_VALUE.put("value", "user@_REALM");
-    MAP_VALUE.put("configuration", "service-site/service.component.kerberos.https.principal");
-    MAP_VALUE.put("type", "user");
-    MAP_VALUE.put("local_username", null);
+    MAP_VALUE.put(KerberosPrincipalDescriptor.KEY_VALUE, "user@_REALM");
+    MAP_VALUE.put(KerberosPrincipalDescriptor.KEY_CONFIGURATION, "service-site/service.component.kerberos.https.principal");
+    MAP_VALUE.put(KerberosPrincipalDescriptor.KEY_TYPE, "user");
+    MAP_VALUE.put(KerberosPrincipalDescriptor.KEY_LOCAL_USERNAME, null);
 
     MAP_VALUE_SPARSE = new TreeMap<>();
-    MAP_VALUE_SPARSE.put("value", "userOther@_REALM");
+    MAP_VALUE_SPARSE.put(KerberosPrincipalDescriptor.KEY_VALUE, "userOther@_REALM");
   }
 
 
-  public static void validateFromJSON(KerberosPrincipalDescriptor principalDescriptor) {
+  static void validateFromJSON(KerberosPrincipalDescriptor principalDescriptor) {
     Assert.assertNotNull(principalDescriptor);
     Assert.assertFalse(principalDescriptor.isContainer());
     Assert.assertEquals("service/_HOST@_REALM", principalDescriptor.getValue());
@@ -68,7 +68,7 @@ public class KerberosPrincipalDescriptorTest {
     Assert.assertEquals("localUser", principalDescriptor.getLocalUsername());
   }
 
-  public static void validateFromMap(KerberosPrincipalDescriptor principalDescriptor) {
+  static void validateFromMap(KerberosPrincipalDescriptor principalDescriptor) {
     Assert.assertNotNull(principalDescriptor);
     Assert.assertFalse(principalDescriptor.isContainer());
     Assert.assertEquals("user@_REALM", principalDescriptor.getValue());
@@ -77,7 +77,7 @@ public class KerberosPrincipalDescriptorTest {
     Assert.assertNull(principalDescriptor.getLocalUsername());
   }
 
-  public static void validateUpdatedData(KerberosPrincipalDescriptor principalDescriptor) {
+  static void validateUpdatedData(KerberosPrincipalDescriptor principalDescriptor) {
     Assert.assertNotNull(principalDescriptor);
     Assert.assertEquals("user@_REALM", principalDescriptor.getValue());
     Assert.assertEquals("service-site/service.component.kerberos.https.principal", principalDescriptor.getConfiguration());
@@ -192,4 +192,4 @@ public class KerberosPrincipalDescriptorTest {
     Assert.assertEquals(KerberosPrincipalType.USER, principalDescriptor.getType());
     Assert.assertNull(principalDescriptor.getLocalUsername());
   }
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosServiceDescriptorTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosServiceDescriptorTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosServiceDescriptorTest.java
index e4d3c90..17134c0 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosServiceDescriptorTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosServiceDescriptorTest.java
@@ -37,9 +37,9 @@ import com.google.gson.Gson;
 
 import junit.framework.Assert;
 
-@Category({ category.KerberosTest.class})
+@Category({category.KerberosTest.class})
 public class KerberosServiceDescriptorTest {
-  public static final String JSON_VALUE =
+  static final String JSON_VALUE =
       "{" +
           "  \"name\": \"SERVICE_NAME\"," +
           "  \"preconfigure\": \"true\"," +
@@ -62,7 +62,7 @@ public class KerberosServiceDescriptorTest {
           "  ]" +
           "}";
 
-  public static final String JSON_VALUE_SERVICES =
+  private static final String JSON_VALUE_SERVICES =
       "{ " +
           "\"services\" : [" +
           "{" +
@@ -130,22 +130,22 @@ public class KerberosServiceDescriptorTest {
 
     MAP_VALUE = new TreeMap<>();
     MAP_VALUE.put("name", "A_DIFFERENT_SERVICE_NAME");
-    MAP_VALUE.put(AbstractKerberosDescriptor.Type.IDENTITY.getDescriptorPluralName(), identitiesMap.values());
-    MAP_VALUE.put(AbstractKerberosDescriptor.Type.COMPONENT.getDescriptorPluralName(), componentsMap.values());
-    MAP_VALUE.put(AbstractKerberosDescriptor.Type.CONFIGURATION.getDescriptorPluralName(), configurationsMap.values());
-    MAP_VALUE.put(AbstractKerberosDescriptor.Type.AUTH_TO_LOCAL_PROPERTY.getDescriptorPluralName(), authToLocalRules);
+    MAP_VALUE.put(KerberosServiceDescriptor.KEY_IDENTITIES, identitiesMap.values());
+    MAP_VALUE.put(KerberosServiceDescriptor.KEY_COMPONENTS, componentsMap.values());
+    MAP_VALUE.put(KerberosServiceDescriptor.KEY_CONFIGURATIONS, configurationsMap.values());
+    MAP_VALUE.put(KerberosServiceDescriptor.KEY_AUTH_TO_LOCAL_PROPERTIES, authToLocalRules);
   }
 
   private static final KerberosServiceDescriptorFactory KERBEROS_SERVICE_DESCRIPTOR_FACTORY = new KerberosServiceDescriptorFactory();
 
-  public static void validateFromJSON(KerberosServiceDescriptor[] serviceDescriptors) {
+  private static void validateFromJSON(KerberosServiceDescriptor[] serviceDescriptors) {
     Assert.assertNotNull(serviceDescriptors);
     Assert.assertEquals(2, serviceDescriptors.length);
 
     validateFromJSON(serviceDescriptors[0]);
   }
 
-  public static void validateFromJSON(KerberosServiceDescriptor serviceDescriptor) {
+  static void validateFromJSON(KerberosServiceDescriptor serviceDescriptor) {
     Assert.assertNotNull(serviceDescriptor);
     Assert.assertTrue(serviceDescriptor.isContainer());
 
@@ -190,7 +190,7 @@ public class KerberosServiceDescriptorTest {
     Assert.assertEquals("service.name.rules1", authToLocalProperties.iterator().next());
   }
 
-  public static void validateFromMap(KerberosServiceDescriptor serviceDescriptor) {
+  static void validateFromMap(KerberosServiceDescriptor serviceDescriptor) {
     Assert.assertNotNull(serviceDescriptor);
     Assert.assertTrue(serviceDescriptor.isContainer());
 
@@ -235,7 +235,7 @@ public class KerberosServiceDescriptorTest {
     Assert.assertEquals("service.name.rules2", authToLocalProperties.iterator().next());
   }
 
-  public void validateUpdatedData(KerberosServiceDescriptor serviceDescriptor) {
+  private void validateUpdatedData(KerberosServiceDescriptor serviceDescriptor) {
     Assert.assertNotNull(serviceDescriptor);
 
     Assert.assertEquals("A_DIFFERENT_SERVICE_NAME", serviceDescriptor.getName());
@@ -387,8 +387,6 @@ public class KerberosServiceDescriptorTest {
 
   /**
   * Test a JSON object in which only a Service and configs are defined, but no Components.
-   *
-   * @throws AmbariException
    */
   @Test
   public void testJSONWithOnlyServiceNameAndConfigurations() throws AmbariException {
@@ -422,4 +420,4 @@ public class KerberosServiceDescriptorTest {
     Assert.assertNotNull(serviceDescriptor);
     Assert.assertEquals("A_DIFFERENT_SERVICE_NAME", serviceDescriptor.getName());
   }
-}
\ No newline at end of file
+}

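For reference, a small sketch of building a service descriptor from JSON with the factory used above. The trimmed JSON body here is hypothetical, and createInstance(name, json) is assumed to behave as it does in the test:

import org.apache.ambari.server.state.kerberos.KerberosServiceDescriptor;
import org.apache.ambari.server.state.kerberos.KerberosServiceDescriptorFactory;

public class ServiceDescriptorSketch {

  public static void main(String[] args) throws Exception {
    // Trimmed-down descriptor body; the full JSON in the test also carries
    // components, configurations and auth_to_local_properties.
    String json =
        "{" +
        "  \"name\": \"SERVICE_NAME\"," +
        "  \"identities\": [ { \"name\": \"service_identity\" } ]" +
        "}";

    KerberosServiceDescriptor descriptor =
        new KerberosServiceDescriptorFactory().createInstance("SERVICE_NAME", json);

    System.out.println(descriptor.getName());
    System.out.println(descriptor.getIdentities().size());
  }
}
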
http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/test/java/org/apache/ambari/server/state/stack/UpgradePackTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/stack/UpgradePackTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/stack/UpgradePackTest.java
index 616139c..0eac2be 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/stack/UpgradePackTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/stack/UpgradePackTest.java
@@ -173,6 +173,7 @@ public class UpgradePackTest {
     ConfigureTask ct = (ConfigureTask) t;
     // check that the Configure task successfully parsed id
     assertEquals("hdp_2_1_1_nm_pre_upgrade", ct.getId());
+    assertFalse(ct.supportsPatch);
   }
 
   @Test

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/test/java/org/apache/ambari/server/topology/ClusterDeployWithStartOnlyTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/topology/ClusterDeployWithStartOnlyTest.java b/ambari-server/src/test/java/org/apache/ambari/server/topology/ClusterDeployWithStartOnlyTest.java
index c3248a3..0daa20f 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/topology/ClusterDeployWithStartOnlyTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/topology/ClusterDeployWithStartOnlyTest.java
@@ -391,7 +391,7 @@ public class ClusterDeployWithStartOnlyTest extends EasyMockSupport {
     ambariContext.persistInstallStateForUI(CLUSTER_NAME, STACK_NAME, STACK_VERSION);
     expectLastCall().once();
 
-    expect(executor.submit(anyObject(AsyncCallableService.class))).andReturn(mockFuture).times(2);
+    expect(executor.submit(anyObject(AsyncCallableService.class))).andReturn(mockFuture).times(1);
 
     persistedTopologyRequest = new PersistedTopologyRequest(1, request);
     expect(persistedState.getAllRequests()).andReturn(Collections.emptyMap()).once();

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/test/java/org/apache/ambari/server/topology/ClusterInstallWithoutStartOnComponentLevelTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/topology/ClusterInstallWithoutStartOnComponentLevelTest.java b/ambari-server/src/test/java/org/apache/ambari/server/topology/ClusterInstallWithoutStartOnComponentLevelTest.java
index 372d0a1..bbf4fdb 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/topology/ClusterInstallWithoutStartOnComponentLevelTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/topology/ClusterInstallWithoutStartOnComponentLevelTest.java
@@ -368,7 +368,7 @@ public class ClusterInstallWithoutStartOnComponentLevelTest extends EasyMockSupp
     ambariContext.persistInstallStateForUI(CLUSTER_NAME, STACK_NAME, STACK_VERSION);
     expectLastCall().once();
 
-    expect(executor.submit(anyObject(AsyncCallableService.class))).andReturn(mockFuture).times(2);
+    expect(executor.submit(anyObject(AsyncCallableService.class))).andReturn(mockFuture).times(1);
 
     persistedTopologyRequest = new PersistedTopologyRequest(1, request);
     expect(persistedState.getAllRequests()).andReturn(Collections.emptyMap()).once();

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/test/java/org/apache/ambari/server/topology/ClusterInstallWithoutStartTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/topology/ClusterInstallWithoutStartTest.java b/ambari-server/src/test/java/org/apache/ambari/server/topology/ClusterInstallWithoutStartTest.java
index 9620507..059a8be 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/topology/ClusterInstallWithoutStartTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/topology/ClusterInstallWithoutStartTest.java
@@ -363,7 +363,7 @@ public class ClusterInstallWithoutStartTest extends EasyMockSupport {
     ambariContext.persistInstallStateForUI(CLUSTER_NAME, STACK_NAME, STACK_VERSION);
     expectLastCall().once();
 
-    expect(executor.submit(anyObject(AsyncCallableService.class))).andReturn(mockFuture).times(2);
+    expect(executor.submit(anyObject(AsyncCallableService.class))).andReturn(mockFuture).times(1);
 
     persistedTopologyRequest = new PersistedTopologyRequest(1, request);
     expect(persistedState.getAllRequests()).andReturn(Collections.emptyMap()).once();

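The three topology tests above each reduce the expected executor.submit() count from two to one, matching the single async task now submitted during provisioning. In EasyMock, times(1) is an exact bound enforced at verify(); a minimal standalone sketch of that pattern, using a plain JDK ExecutorService mock rather than the Ambari code:

import static org.easymock.EasyMock.anyObject;
import static org.easymock.EasyMock.createMock;
import static org.easymock.EasyMock.expect;
import static org.easymock.EasyMock.replay;
import static org.easymock.EasyMock.verify;

import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;

public class SubmitCountSketch {

  @SuppressWarnings("unchecked")
  public static void main(String[] args) {
    ExecutorService executor = createMock(ExecutorService.class);
    Future<Object> future = createMock(Future.class);

    // Exactly one submit(Callable) is allowed: a second call fails fast,
    // a missing call fails at verify().
    expect(executor.submit(anyObject(Callable.class))).andReturn(future).times(1);
    replay(executor, future);

    executor.submit(() -> null);  // the one permitted call

    verify(executor, future);
  }
}
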
http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/test/java/org/apache/ambari/server/topology/ConfigureClusterTaskTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/topology/ConfigureClusterTaskTest.java b/ambari-server/src/test/java/org/apache/ambari/server/topology/ConfigureClusterTaskTest.java
index feefcab..b2dac8f 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/topology/ConfigureClusterTaskTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/topology/ConfigureClusterTaskTest.java
@@ -18,6 +18,7 @@
 
 package org.apache.ambari.server.topology;
 
+import static org.easymock.EasyMock.anyObject;
 import static org.easymock.EasyMock.expect;
 import static org.easymock.EasyMock.replay;
 import static org.easymock.EasyMock.reset;
@@ -30,6 +31,7 @@ import java.util.HashMap;
 import java.util.Map;
 import java.util.concurrent.Executors;
 
+import org.apache.ambari.server.events.publishers.AmbariEventPublisher;
 import org.apache.ambari.server.topology.tasks.ConfigureClusterTask;
 import org.easymock.EasyMockRule;
 import org.easymock.Mock;
@@ -60,12 +62,18 @@ public class ConfigureClusterTaskTest {
   @Mock(type = MockType.STRICT)
   private ClusterTopology clusterTopology;
 
+  @Mock(type = MockType.STRICT)
+  private AmbariContext ambariContext;
+
+  @Mock(type = MockType.NICE)
+  private AmbariEventPublisher ambariEventPublisher;
+
   private ConfigureClusterTask testSubject;
 
   @Before
   public void before() {
-    reset(clusterConfigurationRequest, clusterTopology);
-    testSubject = new ConfigureClusterTask(clusterTopology, clusterConfigurationRequest);
+    reset(clusterConfigurationRequest, clusterTopology, ambariContext, ambariEventPublisher);
+    testSubject = new ConfigureClusterTask(clusterTopology, clusterConfigurationRequest, ambariEventPublisher);
   }
 
   @Test
@@ -75,11 +83,15 @@ public class ConfigureClusterTaskTest {
     // is it OK to handle the non existence of hostgroups as a success?!
     expect(clusterConfigurationRequest.getRequiredHostGroups()).andReturn(Collections.emptyList());
     expect(clusterTopology.getHostGroupInfo()).andReturn(Collections.emptyMap());
+    expect(clusterTopology.getClusterId()).andReturn(1L).anyTimes();
+    expect(clusterTopology.getAmbariContext()).andReturn(ambariContext);
+    expect(ambariContext.getClusterName(1L)).andReturn("testCluster");
 
     // this is only called if the "prerequisites" are satisfied
     clusterConfigurationRequest.process();
+    ambariEventPublisher.publish(anyObject());
 
-    replay(clusterConfigurationRequest, clusterTopology);
+    replay(clusterConfigurationRequest, clusterTopology, ambariContext, ambariEventPublisher);
 
     // WHEN
     Boolean result = testSubject.call();

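ConfigureClusterTask now receives an AmbariEventPublisher and publishes an event once the configuration request has been processed, which is why the test adds the publisher mock and the publish(anyObject()) expectation. A minimal sketch of that process-then-publish shape, using local stand-in interfaces rather than the real Ambari types:

public class ConfigureTaskSketch {
  interface EventPublisher { void publish(Object event); }
  interface ConfigurationRequest { void process() throws Exception; }

  private final ConfigurationRequest request;
  private final EventPublisher publisher;

  ConfigureTaskSketch(ConfigurationRequest request, EventPublisher publisher) {
    this.request = request;
    this.publisher = publisher;
  }

  Boolean call() throws Exception {
    // The ordering the test verifies: process the configuration first,
    // then publish a notification about the configured cluster.
    request.process();
    publisher.publish("cluster configured");  // stand-in for the real event object
    return Boolean.TRUE;
  }

  public static void main(String[] args) throws Exception {
    ConfigureTaskSketch task = new ConfigureTaskSketch(
        () -> System.out.println("processing cluster configuration"),
        event -> System.out.println("published: " + event));
    System.out.println("result: " + task.call());
  }
}
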
http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/test/java/org/apache/ambari/server/topology/TopologyManagerTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/topology/TopologyManagerTest.java b/ambari-server/src/test/java/org/apache/ambari/server/topology/TopologyManagerTest.java
index 45c8b1a..ac643d7 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/topology/TopologyManagerTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/topology/TopologyManagerTest.java
@@ -118,6 +118,7 @@ public class TopologyManagerTest {
 
   @Mock(type = MockType.NICE)
   private ProvisionClusterRequest request;
+
   private final PersistedTopologyRequest persistedTopologyRequest = new PersistedTopologyRequest(1, request);
   @Mock(type = MockType.STRICT)
   private LogicalRequestFactory logicalRequestFactory;
@@ -284,7 +285,6 @@ public class TopologyManagerTest {
     expect(request.getDescription()).andReturn("Provision Cluster Test").anyTimes();
     expect(request.getConfiguration()).andReturn(topoConfiguration).anyTimes();
     expect(request.getHostGroupInfo()).andReturn(groupInfoMap).anyTimes();
-    expect(request.getRepositoryVersion()).andReturn("1").anyTimes();
     expect(request.getConfigRecommendationStrategy()).andReturn(ConfigRecommendationStrategy.NEVER_APPLY).anyTimes();
     expect(request.getSecurityConfiguration()).andReturn(null).anyTimes();
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog260Test.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog260Test.java b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog260Test.java
index 2a62f2e..52c3f62 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog260Test.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog260Test.java
@@ -22,6 +22,8 @@ import static org.easymock.EasyMock.anyObject;
 import static org.easymock.EasyMock.anyString;
 import static org.easymock.EasyMock.capture;
 import static org.easymock.EasyMock.createMock;
+import static org.easymock.EasyMock.createMockBuilder;
+import static org.easymock.EasyMock.createNiceMock;
 import static org.easymock.EasyMock.eq;
 import static org.easymock.EasyMock.expect;
 import static org.easymock.EasyMock.expectLastCall;
@@ -29,7 +31,10 @@ import static org.easymock.EasyMock.newCapture;
 import static org.easymock.EasyMock.replay;
 import static org.easymock.EasyMock.reset;
 import static org.easymock.EasyMock.verify;
+import static org.junit.Assert.assertTrue;
 
+import java.io.File;
+import java.net.URL;
 import java.sql.Connection;
 import java.sql.ResultSet;
 import java.sql.SQLException;
@@ -37,6 +42,7 @@ import java.sql.Statement;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
+import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
@@ -45,29 +51,50 @@ import javax.persistence.EntityManager;
 
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.actionmanager.ActionManager;
+import org.apache.ambari.server.api.services.AmbariMetaInfo;
 import org.apache.ambari.server.configuration.Configuration;
 import org.apache.ambari.server.controller.AmbariManagementController;
-import org.apache.ambari.server.controller.KerberosHelper;
+import org.apache.ambari.server.controller.AmbariManagementControllerImpl;
 import org.apache.ambari.server.controller.MaintenanceStateHelper;
+import org.apache.ambari.server.controller.ServiceConfigVersionResponse;
 import org.apache.ambari.server.orm.DBAccessor;
 import org.apache.ambari.server.orm.DBAccessor.DBColumnInfo;
+import org.apache.ambari.server.orm.dao.ArtifactDAO;
+import org.apache.ambari.server.orm.dao.WidgetDAO;
+import org.apache.ambari.server.orm.entities.ArtifactEntity;
+import org.apache.ambari.server.orm.entities.WidgetEntity;
+import org.apache.ambari.server.stack.StackManagerFactory;
 import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.Clusters;
 import org.apache.ambari.server.state.Config;
 import org.apache.ambari.server.state.Service;
+import org.apache.ambari.server.state.ServiceInfo;
 import org.apache.ambari.server.state.StackId;
+import org.apache.ambari.server.state.StackInfo;
+import org.apache.ambari.server.state.kerberos.KerberosComponentDescriptor;
+import org.apache.ambari.server.state.kerberos.KerberosDescriptor;
+import org.apache.ambari.server.state.kerberos.KerberosDescriptorFactory;
+import org.apache.ambari.server.state.kerberos.KerberosIdentityDescriptor;
+import org.apache.ambari.server.state.kerberos.KerberosServiceDescriptor;
 import org.apache.ambari.server.state.stack.OsFamily;
+import org.apache.commons.io.FileUtils;
 import org.easymock.Capture;
+import org.easymock.EasyMock;
 import org.easymock.EasyMockRunner;
+import org.easymock.EasyMockSupport;
 import org.easymock.Mock;
 import org.easymock.MockType;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;
+import org.junit.Rule;
 import org.junit.Test;
+import org.junit.rules.TemporaryFolder;
 import org.junit.runner.RunWith;
 
+import com.google.common.collect.Maps;
 import com.google.gson.Gson;
+import com.google.inject.AbstractModule;
 import com.google.inject.Binder;
 import com.google.inject.Guice;
 import com.google.inject.Injector;
@@ -105,38 +132,16 @@ public class UpgradeCatalog260Test {
   @Mock(type = MockType.NICE)
   private OsFamily osFamily;
 
-  @Mock(type = MockType.NICE)
-  private KerberosHelper kerberosHelper;
-
-  @Mock(type = MockType.NICE)
-  private ActionManager actionManager;
-
-  @Mock(type = MockType.NICE)
-  private Config config;
-
-  @Mock(type = MockType.STRICT)
-  private Service service;
-
-  @Mock(type = MockType.NICE)
-  private Clusters clusters;
-
-  @Mock(type = MockType.NICE)
-  private Cluster cluster;
-
-  @Mock(type = MockType.NICE)
-  private Injector injector;
+  @Rule
+  public TemporaryFolder temporaryFolder = new TemporaryFolder();
 
   @Before
   public void init() {
-    reset(entityManagerProvider, injector);
+    reset(entityManagerProvider);
 
     expect(entityManagerProvider.get()).andReturn(entityManager).anyTimes();
 
-    expect(injector.getInstance(Gson.class)).andReturn(null).anyTimes();
-    expect(injector.getInstance(MaintenanceStateHelper.class)).andReturn(null).anyTimes();
-    expect(injector.getInstance(KerberosHelper.class)).andReturn(kerberosHelper).anyTimes();
-
-    replay(entityManagerProvider, injector);
+    replay(entityManagerProvider);
   }
 
   @After
@@ -190,24 +195,18 @@ public class UpgradeCatalog260Test {
     expectDropStaleTables();
 
     Capture<DBColumnInfo> repoVersionHiddenColumnCapture = newCapture();
-    expectUpdateRepositoryVersionTableTable(repoVersionHiddenColumnCapture);
+    Capture<DBColumnInfo> repoVersionResolvedColumnCapture = newCapture();
+    expectUpdateRepositoryVersionTableTable(repoVersionHiddenColumnCapture, repoVersionResolvedColumnCapture);
 
     Capture<DBColumnInfo> unmapped = newCapture();
     expectRenameServiceDeletedColumn(unmapped);
 
-    replay(dbAccessor, configuration, connection, statement, resultSet);
+    expectAddViewUrlPKConstraint();
+    expectRemoveStaleConstraints();
 
-    Module module = new Module() {
-      @Override
-      public void configure(Binder binder) {
-        binder.bind(DBAccessor.class).toInstance(dbAccessor);
-        binder.bind(OsFamily.class).toInstance(osFamily);
-        binder.bind(EntityManager.class).toInstance(entityManager);
-        binder.bind(Configuration.class).toInstance(configuration);
-      }
-    };
+    replay(dbAccessor, configuration, connection, statement, resultSet);
 
-    Injector injector = Guice.createInjector(module);
+    Injector injector = getInjector();
     UpgradeCatalog260 upgradeCatalog260 = injector.getInstance(UpgradeCatalog260.class);
     upgradeCatalog260.executeDDLUpdates();
 
@@ -219,7 +218,18 @@ public class UpgradeCatalog260Test {
     verifyAddSelectedCollumsToClusterconfigTable(selectedColumnInfo, selectedmappingColumnInfo, selectedTimestampColumnInfo, createTimestampColumnInfo);
     verifyUpdateUpgradeTable(rvid, orchestration, revertAllowed);
     verifyCreateUpgradeHistoryTable(columns);
-    verifyUpdateRepositoryVersionTableTable(repoVersionHiddenColumnCapture);
+    verifyUpdateRepositoryVersionTableTable(repoVersionHiddenColumnCapture, repoVersionResolvedColumnCapture);
+  }
+
+  private void expectRemoveStaleConstraints() throws SQLException {
+    dbAccessor.dropUniqueConstraint(eq(UpgradeCatalog260.USERS_TABLE), eq(UpgradeCatalog260.STALE_POSTGRESS_USERS_LDAP_USER_KEY));
+  }
+
+  private void expectAddViewUrlPKConstraint() throws SQLException {
+    dbAccessor.dropPKConstraint(eq(UpgradeCatalog260.VIEWURL_TABLE), eq(UpgradeCatalog260.STALE_POSTGRESS_VIEWURL_PKEY));
+    expectLastCall().once();
+    dbAccessor.addPKConstraint(eq(UpgradeCatalog260.VIEWURL_TABLE), eq(UpgradeCatalog260.PK_VIEWURL), eq(UpgradeCatalog260.URL_ID_COLUMN));
+    expectLastCall().once();
   }
 
   public void expectDropStaleTables() throws SQLException {
@@ -231,7 +241,7 @@ public class UpgradeCatalog260Test {
     expectLastCall().once();
   }
 
-  public  void expectRenameServiceDeletedColumn(Capture<DBColumnInfo> unmapped) throws SQLException {
+  public void expectRenameServiceDeletedColumn(Capture<DBColumnInfo> unmapped) throws SQLException {
     dbAccessor.renameColumn(eq(UpgradeCatalog260.CLUSTER_CONFIG_TABLE), eq(UpgradeCatalog260.SERVICE_DELETED_COLUMN), capture(unmapped));
     expectLastCall().once();
   }
@@ -301,7 +311,7 @@ public class UpgradeCatalog260Test {
   }
 
   public void verifyUpdateUpgradeTable(Capture<DBColumnInfo> rvid,
-      Capture<DBColumnInfo> orchestration, Capture<DBColumnInfo> revertAllowed) {
+                                       Capture<DBColumnInfo> orchestration, Capture<DBColumnInfo> revertAllowed) {
     DBColumnInfo rvidValue = rvid.getValue();
     Assert.assertEquals(UpgradeCatalog260.REPO_VERSION_ID_COLUMN, rvidValue.getName());
     Assert.assertEquals(Long.class, rvidValue.getType());
@@ -325,7 +335,7 @@ public class UpgradeCatalog260Test {
   }
 
   public void expectUpdateUpgradeTable(Capture<DBColumnInfo> rvid,
-      Capture<DBColumnInfo> orchestration, Capture<DBColumnInfo> revertAllowed)
+                                       Capture<DBColumnInfo> orchestration, Capture<DBColumnInfo> revertAllowed)
       throws SQLException {
 
     dbAccessor.clearTable(eq(UpgradeCatalog260.UPGRADE_TABLE));
@@ -471,8 +481,8 @@ public class UpgradeCatalog260Test {
   }
 
   public void verifyGetCurrentVersionID(Capture<String[]> scdcaptureKey, Capture<String[]> scdcaptureValue) {
-    Assert.assertTrue(Arrays.equals(scdcaptureKey.getValue(), new String[]{UpgradeCatalog260.STATE_COLUMN}));
-    Assert.assertTrue(Arrays.equals(scdcaptureValue.getValue(), new String[]{UpgradeCatalog260.CURRENT}));
+    assertTrue(Arrays.equals(scdcaptureKey.getValue(), new String[]{UpgradeCatalog260.STATE_COLUMN}));
+    assertTrue(Arrays.equals(scdcaptureValue.getValue(), new String[]{UpgradeCatalog260.CURRENT}));
   }
 
   public void expectUpdateServiceComponentDesiredStateTable(Capture<DBColumnInfo> scdstadd1, Capture<DBColumnInfo> scdstalter1, Capture<DBColumnInfo> scdstadd2, Capture<DBColumnInfo> scdstalter2) throws SQLException {
@@ -527,19 +537,9 @@ public class UpgradeCatalog260Test {
     expectLastCall().once();
     replay(dbAccessor, configuration, connection, statement, resultSet);
 
-    Module module = new Module() {
-      @Override
-      public void configure(Binder binder) {
-        binder.bind(DBAccessor.class).toInstance(dbAccessor);
-        binder.bind(OsFamily.class).toInstance(osFamily);
-        binder.bind(EntityManager.class).toInstance(entityManager);
-        binder.bind(Configuration.class).toInstance(configuration);
-      }
-    };
-
-    Injector injector = Guice.createInjector(module);
+    Injector injector = getInjector();
     UpgradeCatalog260 upgradeCatalog260 = injector.getInstance(UpgradeCatalog260.class);
-    upgradeCatalog260.removeSupersetFromDruid();
+    upgradeCatalog260.executePreDMLUpdates();
 
     verify(dbAccessor);
 
@@ -552,42 +552,40 @@ public class UpgradeCatalog260Test {
    * @param hiddenColumnCapture
    * @throws SQLException
    */
-  public void expectUpdateRepositoryVersionTableTable(Capture<DBColumnInfo> hiddenColumnCapture) throws SQLException {
+  public void expectUpdateRepositoryVersionTableTable(Capture<DBColumnInfo> hiddenColumnCapture,
+                                                      Capture<DBColumnInfo> repoVersionResolvedColumnCapture) throws SQLException {
     dbAccessor.addColumn(eq(UpgradeCatalog260.REPO_VERSION_TABLE), capture(hiddenColumnCapture));
+    dbAccessor.addColumn(eq(UpgradeCatalog260.REPO_VERSION_TABLE), capture(repoVersionResolvedColumnCapture));
     expectLastCall().once();
   }
 
-  public void verifyUpdateRepositoryVersionTableTable(Capture<DBColumnInfo> hiddenColumnCapture) {
+  public void verifyUpdateRepositoryVersionTableTable(Capture<DBColumnInfo> hiddenColumnCapture,
+                                                      Capture<DBColumnInfo> resolvedColumnCapture) {
     DBColumnInfo hiddenColumn = hiddenColumnCapture.getValue();
     Assert.assertEquals(0, hiddenColumn.getDefaultValue());
     Assert.assertEquals(UpgradeCatalog260.REPO_VERSION_HIDDEN_COLUMN, hiddenColumn.getName());
     Assert.assertEquals(false, hiddenColumn.isNullable());
+
+    DBColumnInfo resolvedColumn = resolvedColumnCapture.getValue();
+    Assert.assertEquals(0, resolvedColumn.getDefaultValue());
+    Assert.assertEquals(UpgradeCatalog260.REPO_VERSION_RESOLVED_COLUMN, resolvedColumn.getName());
+    Assert.assertEquals(false, resolvedColumn.isNullable());
   }
 
   @Test
   public void testEnsureZeppelinProxyUserConfigs() throws AmbariException {
 
-    final Clusters clusters = createMock(Clusters.class);
+    Injector injector = getInjector();
+
+    final Clusters clusters = injector.getInstance(Clusters.class);
     final Cluster cluster = createMock(Cluster.class);
     final Config zeppelinEnvConf = createMock(Config.class);
     final Config coreSiteConf = createMock(Config.class);
     final Config coreSiteConfNew = createMock(Config.class);
-    final AmbariManagementController controller = createMock(AmbariManagementController.class);
+    final AmbariManagementController controller = injector.getInstance(AmbariManagementController.class);
 
     Capture<? extends Map<String, String>> captureCoreSiteConfProperties = newCapture();
 
-    Module module = new Module() {
-      @Override
-      public void configure(Binder binder) {
-        binder.bind(DBAccessor.class).toInstance(dbAccessor);
-        binder.bind(OsFamily.class).toInstance(osFamily);
-        binder.bind(EntityManager.class).toInstance(entityManager);
-        binder.bind(Configuration.class).toInstance(configuration);
-        binder.bind(Clusters.class).toInstance(clusters);
-        binder.bind(AmbariManagementController.class).toInstance(controller);
-      }
-    };
-
     expect(clusters.getClusters()).andReturn(Collections.singletonMap("c1", cluster)).once();
 
     expect(cluster.getClusterName()).andReturn("c1").atLeastOnce();
@@ -610,14 +608,264 @@ public class UpgradeCatalog260Test {
 
     replay(clusters, cluster, zeppelinEnvConf, coreSiteConf, coreSiteConfNew, controller);
 
-    Injector injector = Guice.createInjector(module);
     UpgradeCatalog260 upgradeCatalog260 = injector.getInstance(UpgradeCatalog260.class);
     upgradeCatalog260.ensureZeppelinProxyUserConfigs();
 
     verify(clusters, cluster, zeppelinEnvConf, coreSiteConf, coreSiteConfNew, controller);
 
-    Assert.assertTrue(captureCoreSiteConfProperties.hasCaptured());
+    assertTrue(captureCoreSiteConfProperties.hasCaptured());
     Assert.assertEquals("existing_value", captureCoreSiteConfProperties.getValue().get("hadoop.proxyuser.zeppelin_user.hosts"));
     Assert.assertEquals("*", captureCoreSiteConfProperties.getValue().get("hadoop.proxyuser.zeppelin_user.groups"));
   }
+
+  @Test
+  public void testUpdateKerberosDescriptorArtifact() throws Exception {
+
+    Injector injector = getInjector();
+
+    URL systemResourceURL = ClassLoader.getSystemResource("kerberos/test_kerberos_descriptor_ranger_kms.json");
+    Assert.assertNotNull(systemResourceURL);
+
+    final KerberosDescriptor kerberosDescriptor = new KerberosDescriptorFactory().createInstance(new File(systemResourceURL.getFile()));
+    Assert.assertNotNull(kerberosDescriptor);
+
+    KerberosServiceDescriptor serviceDescriptor;
+    serviceDescriptor = kerberosDescriptor.getService("RANGER_KMS");
+    Assert.assertNotNull(serviceDescriptor);
+    Assert.assertNotNull(serviceDescriptor.getIdentity("/smokeuser"));
+    Assert.assertNotNull(serviceDescriptor.getIdentity("/spnego"));
+
+    KerberosComponentDescriptor componentDescriptor;
+    componentDescriptor = serviceDescriptor.getComponent("RANGER_KMS_SERVER");
+    Assert.assertNotNull(componentDescriptor);
+    Assert.assertNotNull(componentDescriptor.getIdentity("/smokeuser"));
+    Assert.assertNotNull(componentDescriptor.getIdentity("/spnego"));
+    Assert.assertNotNull(componentDescriptor.getIdentity("/spnego").getPrincipalDescriptor());
+    Assert.assertEquals("invalid_name@${realm}", componentDescriptor.getIdentity("/spnego").getPrincipalDescriptor().getValue());
+
+    ArtifactEntity artifactEntity = createMock(ArtifactEntity.class);
+
+    expect(artifactEntity.getArtifactData()).andReturn(kerberosDescriptor.toMap()).once();
+
+    Capture<Map<String, Object>> captureMap = newCapture();
+    expect(artifactEntity.getForeignKeys()).andReturn(Collections.singletonMap("cluster", "2"));
+    artifactEntity.setArtifactData(capture(captureMap));
+    expectLastCall().once();
+
+    ArtifactDAO artifactDAO = createMock(ArtifactDAO.class);
+    expect(artifactDAO.merge(artifactEntity)).andReturn(artifactEntity).atLeastOnce();
+
+    Map<String, String> properties = new HashMap<>();
+    properties.put("ranger.ks.kerberos.principal", "correct_value@EXAMPLE.COM");
+    properties.put("xasecure.audit.jaas.Client.option.principal", "wrong_value@EXAMPLE.COM");
+
+    Config config = createMock(Config.class);
+    expect(config.getProperties()).andReturn(properties).anyTimes();
+    expect(config.getPropertiesAttributes()).andReturn(Collections.<String, Map<String, String>>emptyMap()).anyTimes();
+    expect(config.getTag()).andReturn("version1").anyTimes();
+    expect(config.getType()).andReturn("ranger-kms-audit").anyTimes();
+
+    Config newConfig = createMock(Config.class);
+    expect(newConfig.getTag()).andReturn("version2").anyTimes();
+    expect(newConfig.getType()).andReturn("ranger-kms-audit").anyTimes();
+
+    ServiceConfigVersionResponse response = createMock(ServiceConfigVersionResponse.class);
+
+    StackId stackId = createMock(StackId.class);
+
+    Cluster cluster = createMock(Cluster.class);
+    expect(cluster.getDesiredStackVersion()).andReturn(stackId).anyTimes();
+    expect(cluster.getDesiredConfigByType("dbks-site")).andReturn(config).anyTimes();
+    expect(cluster.getDesiredConfigByType("ranger-kms-audit")).andReturn(config).anyTimes();
+    expect(cluster.getConfigsByType("ranger-kms-audit")).andReturn(Collections.singletonMap("version1", config)).anyTimes();
+    expect(cluster.getServiceByConfigType("ranger-kms-audit")).andReturn("RANGER").anyTimes();
+    expect(cluster.getClusterName()).andReturn("cl1").anyTimes();
+    expect(cluster.getConfig(eq("ranger-kms-audit"), anyString())).andReturn(newConfig).once();
+    expect(cluster.addDesiredConfig("ambari-upgrade", Collections.singleton(newConfig), "Updated ranger-kms-audit during Ambari Upgrade from 2.5.2 to 2.6.0.")).andReturn(response).once();
+
+    final Clusters clusters = injector.getInstance(Clusters.class);
+    expect(clusters.getCluster(2L)).andReturn(cluster).anyTimes();
+
+    Capture<? extends Map<String, String>> captureProperties = newCapture();
+
+    AmbariManagementController controller = injector.getInstance(AmbariManagementController.class);
+    expect(controller.createConfig(eq(cluster), eq(stackId), eq("ranger-kms-audit"), capture(captureProperties), anyString(), anyObject(Map.class)))
+        .andReturn(null)
+        .once();
+
+    replay(artifactDAO, artifactEntity, cluster, clusters, config, newConfig, response, controller, stackId);
+
+    UpgradeCatalog260 upgradeCatalog260 = injector.getInstance(UpgradeCatalog260.class);
+    upgradeCatalog260.updateKerberosDescriptorArtifact(artifactDAO, artifactEntity);
+    verify(artifactDAO, artifactEntity, cluster, clusters, config, newConfig, response, controller, stackId);
+
+    KerberosDescriptor kerberosDescriptorUpdated = new KerberosDescriptorFactory().createInstance(captureMap.getValue());
+    Assert.assertNotNull(kerberosDescriptorUpdated);
+
+    Assert.assertNull(kerberosDescriptorUpdated.getService("RANGER_KMS").getIdentity("/smokeuser"));
+    Assert.assertNull(kerberosDescriptorUpdated.getService("RANGER_KMS").getComponent("RANGER_KMS_SERVER").getIdentity("/smokeuser"));
+
+    KerberosIdentityDescriptor identity;
+
+    Assert.assertNull(kerberosDescriptorUpdated.getService("RANGER_KMS").getIdentity("/spnego"));
+    identity = kerberosDescriptorUpdated.getService("RANGER_KMS").getIdentity("ranger_kms_spnego");
+    Assert.assertNotNull(identity);
+    Assert.assertEquals("/spnego", identity.getReference());
+
+    Assert.assertNull(kerberosDescriptorUpdated.getService("RANGER_KMS").getComponent("RANGER_KMS_SERVER").getIdentity("/spnego"));
+    identity = kerberosDescriptorUpdated.getService("RANGER_KMS").getComponent("RANGER_KMS_SERVER").getIdentity("ranger_kms_ranger_kms_server_spnego");
+    Assert.assertNotNull(identity);
+    Assert.assertEquals("/spnego", identity.getReference());
+    Assert.assertNotNull(identity.getPrincipalDescriptor());
+    Assert.assertNull(identity.getPrincipalDescriptor().getValue());
+
+    Assert.assertTrue(captureProperties.hasCaptured());
+    Map<String, String> newProperties = captureProperties.getValue();
+    Assert.assertEquals("correct_value@EXAMPLE.COM", newProperties.get("xasecure.audit.jaas.Client.option.principal"));
+  }
+
+  @Test
+  public void testUpdateAmsConfigs() throws Exception {
+
+    Map<String, String> oldProperties = new HashMap<String, String>() {
+      {
+        put("ssl.client.truststore.location", "/some/location");
+        put("ssl.client.truststore.alias", "test_alias");
+      }
+    };
+    Map<String, String> newProperties = new HashMap<String, String>() {
+      {
+        put("ssl.client.truststore.location", "/some/location");
+      }
+    };
+
+    EasyMockSupport easyMockSupport = new EasyMockSupport();
+
+    Clusters clusters = easyMockSupport.createNiceMock(Clusters.class);
+    final Cluster cluster = easyMockSupport.createNiceMock(Cluster.class);
+    Config mockAmsSslClient = easyMockSupport.createNiceMock(Config.class);
+
+    expect(clusters.getClusters()).andReturn(new HashMap<String, Cluster>() {{
+      put("normal", cluster);
+    }}).once();
+    expect(cluster.getDesiredConfigByType("ams-ssl-client")).andReturn(mockAmsSslClient).atLeastOnce();
+    expect(mockAmsSslClient.getProperties()).andReturn(oldProperties).anyTimes();
+
+    Injector injector = easyMockSupport.createNiceMock(Injector.class);
+    expect(injector.getInstance(Gson.class)).andReturn(null).anyTimes();
+    expect(injector.getInstance(MaintenanceStateHelper.class)).andReturn(null).anyTimes();
+
+    replay(injector, clusters, mockAmsSslClient, cluster);
+
+    AmbariManagementControllerImpl controller = createMockBuilder(AmbariManagementControllerImpl.class)
+        .addMockedMethod("createConfiguration")
+        .addMockedMethod("getClusters", new Class[] { })
+        .addMockedMethod("createConfig")
+        .withConstructor(createNiceMock(ActionManager.class), clusters, injector)
+        .createNiceMock();
+
+    Injector injector2 = easyMockSupport.createNiceMock(Injector.class);
+    Capture<Map> propertiesCapture = EasyMock.newCapture();
+
+    expect(injector2.getInstance(AmbariManagementController.class)).andReturn(controller).anyTimes();
+    expect(controller.getClusters()).andReturn(clusters).anyTimes();
+    expect(controller.createConfig(anyObject(Cluster.class), anyObject(StackId.class), anyString(), capture(propertiesCapture), anyString(),
+        anyObject(Map.class))).andReturn(createNiceMock(Config.class)).once();
+
+    replay(controller, injector2);
+    new UpgradeCatalog260(injector2).updateAmsConfigs();
+    easyMockSupport.verifyAll();
+
+    Map<String, String> updatedProperties = propertiesCapture.getValue();
+    assertTrue(Maps.difference(newProperties, updatedProperties).areEqual());
+  }
+
+  @Test
+  public void testHDFSWidgetUpdate() throws Exception {
+    final Clusters clusters = createNiceMock(Clusters.class);
+    final Cluster cluster = createNiceMock(Cluster.class);
+    final AmbariManagementController controller = createNiceMock(AmbariManagementController.class);
+    final Gson gson = new Gson();
+    final WidgetDAO widgetDAO = createNiceMock(WidgetDAO.class);
+    final AmbariMetaInfo metaInfo = createNiceMock(AmbariMetaInfo.class);
+    WidgetEntity widgetEntity = createNiceMock(WidgetEntity.class);
+    StackId stackId = new StackId("HDP", "2.0.0");
+    StackInfo stackInfo = createNiceMock(StackInfo.class);
+    ServiceInfo serviceInfo = createNiceMock(ServiceInfo.class);
+    Service service = createNiceMock(Service.class);
+
+    String widgetStr = "{\n" +
+        "  \"layouts\": [\n" +
+        "      {\n" +
+        "      \"layout_name\": \"default_hdfs_heatmap\",\n" +
+        "      \"display_name\": \"Standard HDFS HeatMaps\",\n" +
+        "      \"section_name\": \"HDFS_HEATMAPS\",\n" +
+        "      \"widgetLayoutInfo\": [\n" +
+        "        {\n" +
+        "          \"widget_name\": \"HDFS Bytes Read\",\n" +
+        "          \"metrics\": [],\n" +
+        "          \"values\": []\n" +
+        "        }\n" +
+        "      ]\n" +
+        "    }\n" +
+        "  ]\n" +
+        "}";
+
+    File dataDirectory = temporaryFolder.newFolder();
+    File file = new File(dataDirectory, "hdfs_widget.json");
+    FileUtils.writeStringToFile(file, widgetStr);
+
+    final Injector mockInjector = Guice.createInjector(new AbstractModule() {
+      @Override
+      protected void configure() {
+        bind(EntityManager.class).toInstance(createNiceMock(EntityManager.class));
+        bind(AmbariManagementController.class).toInstance(controller);
+        bind(Clusters.class).toInstance(clusters);
+        bind(DBAccessor.class).toInstance(createNiceMock(DBAccessor.class));
+        bind(OsFamily.class).toInstance(createNiceMock(OsFamily.class));
+        bind(Gson.class).toInstance(gson);
+        bind(WidgetDAO.class).toInstance(widgetDAO);
+        bind(StackManagerFactory.class).toInstance(createNiceMock(StackManagerFactory.class));
+        bind(AmbariMetaInfo.class).toInstance(metaInfo);
+      }
+    });
+    expect(controller.getClusters()).andReturn(clusters).anyTimes();
+    expect(clusters.getClusters()).andReturn(new HashMap<String, Cluster>() {{
+      put("normal", cluster);
+    }}).anyTimes();
+    expect(cluster.getServices()).andReturn(Collections.singletonMap("HDFS", service)).anyTimes();
+    expect(cluster.getClusterId()).andReturn(1L).anyTimes();
+    expect(service.getDesiredStackId()).andReturn(stackId).anyTimes();
+    expect(stackInfo.getService("HDFS")).andReturn(serviceInfo);
+    expect(cluster.getDesiredStackVersion()).andReturn(stackId).anyTimes();
+    expect(metaInfo.getStack("HDP", "2.0.0")).andReturn(stackInfo).anyTimes();
+    expect(serviceInfo.getWidgetsDescriptorFile()).andReturn(file).anyTimes();
+
+    expect(widgetDAO.findByName(1L, "HDFS Bytes Read", "ambari", "HDFS_HEATMAPS"))
+        .andReturn(Collections.singletonList(widgetEntity));
+    expect(widgetDAO.merge(widgetEntity)).andReturn(null);
+    expect(widgetEntity.getWidgetName()).andReturn("HDFS Bytes Read").anyTimes();
+
+    replay(clusters, cluster, controller, widgetDAO, metaInfo, widgetEntity, stackInfo, serviceInfo, service);
+
+    mockInjector.getInstance(UpgradeCatalog260.class).updateHDFSWidgetDefinition();
+
+    verify(clusters, cluster, controller, widgetDAO, widgetEntity, stackInfo, serviceInfo);
+  }
+
+  private Injector getInjector() {
+
+    return Guice.createInjector(new Module() {
+      @Override
+      public void configure(Binder binder) {
+        binder.bind(DBAccessor.class).toInstance(dbAccessor);
+        binder.bind(OsFamily.class).toInstance(osFamily);
+        binder.bind(EntityManager.class).toInstance(entityManager);
+        binder.bind(Configuration.class).toInstance(configuration);
+        binder.bind(Clusters.class).toInstance(createMock(Clusters.class));
+        binder.bind(AmbariManagementController.class).toInstance(createMock(AmbariManagementController.class));
+      }
+    });
+  }
+
 }

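The recurring change in this test class is replacing per-test anonymous Guice modules with a shared getInjector() helper that binds the common mocks once. A minimal standalone sketch of that pattern, with placeholder interfaces instead of the real DBAccessor/controller bindings:

import static org.easymock.EasyMock.createMock;

import com.google.inject.AbstractModule;
import com.google.inject.Guice;
import com.google.inject.Injector;

public class SharedTestInjectorSketch {
  interface Database {}   // placeholder for DBAccessor and friends
  interface Controller {} // placeholder for AmbariManagementController

  // One module binds every mock the tests share, so each test pulls the
  // same object graph instead of rebuilding an anonymous Module inline.
  private static Injector getInjector() {
    return Guice.createInjector(new AbstractModule() {
      @Override
      protected void configure() {
        bind(Database.class).toInstance(createMock(Database.class));
        bind(Controller.class).toInstance(createMock(Controller.class));
      }
    });
  }

  public static void main(String[] args) {
    Injector injector = getInjector();
    System.out.println(injector.getInstance(Controller.class) != null);
  }
}
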
http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/test/java/org/apache/ambari/server/utils/ManagedThreadPoolExecutorTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/utils/ManagedThreadPoolExecutorTest.java b/ambari-server/src/test/java/org/apache/ambari/server/utils/ManagedThreadPoolExecutorTest.java
new file mode 100644
index 0000000..a04c38b
--- /dev/null
+++ b/ambari-server/src/test/java/org/apache/ambari/server/utils/ManagedThreadPoolExecutorTest.java
@@ -0,0 +1,51 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.server.utils;
+
+import java.util.concurrent.Callable;
+import java.util.concurrent.Future;
+import java.util.concurrent.LinkedBlockingQueue;
+import java.util.concurrent.TimeUnit;
+
+import org.junit.Test;
+
+import junit.framework.Assert;
+
+public class ManagedThreadPoolExecutorTest {
+
+  @Test
+  public void testStartAndStopToggleRunningState() {
+
+    ManagedThreadPoolExecutor topologyTaskExecutor = new ManagedThreadPoolExecutor(1,
+            1, 0L, TimeUnit.MILLISECONDS,
+            new LinkedBlockingQueue<Runnable>());
+    Future<Boolean> future = topologyTaskExecutor.submit(new Callable<Boolean>() {
+      @Override
+      public Boolean call() {
+        return Boolean.TRUE;
+      }
+    });
+
+    Assert.assertFalse(topologyTaskExecutor.isRunning());
+    topologyTaskExecutor.start();
+    Assert.assertTrue(topologyTaskExecutor.isRunning());
+    topologyTaskExecutor.stop();
+    Assert.assertFalse(topologyTaskExecutor.isRunning());
+
+  }
+}

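A minimal usage sketch of the executor the new test covers, assuming (as the test asserts) that start() and stop() gate execution of submitted work:

import java.util.concurrent.Future;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;

import org.apache.ambari.server.utils.ManagedThreadPoolExecutor;

public class ManagedExecutorSketch {
  public static void main(String[] args) throws Exception {
    ManagedThreadPoolExecutor executor = new ManagedThreadPoolExecutor(
        1, 1, 0L, TimeUnit.MILLISECONDS, new LinkedBlockingQueue<Runnable>());

    // Work can be submitted before start(); under the behaviour the test
    // above asserts, nothing runs until the executor is started.
    Future<Boolean> future = executor.submit(() -> Boolean.TRUE);

    executor.start();
    System.out.println("result: " + future.get());  // TRUE once started
    executor.stop();
  }
}
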

[4/7] ambari git commit: AMBARI-22190. After merging trunk to branch-3.0-perf some parts of code are missing. (mpapirkovskyy)

Posted by mp...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog260.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog260.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog260.java
index d1de998..c31469e 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog260.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog260.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -6,9 +6,9 @@
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -21,6 +21,8 @@ import static org.apache.ambari.server.view.ViewContextImpl.CORE_SITE;
 
 import java.sql.SQLException;
 import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -29,11 +31,24 @@ import javax.persistence.EntityManager;
 import javax.persistence.Query;
 
 import org.apache.ambari.server.AmbariException;
+import org.apache.ambari.server.controller.AmbariManagementController;
 import org.apache.ambari.server.orm.DBAccessor;
+import org.apache.ambari.server.orm.dao.ArtifactDAO;
+import org.apache.ambari.server.orm.dao.RepositoryVersionDAO;
+import org.apache.ambari.server.orm.entities.ArtifactEntity;
 import org.apache.ambari.server.orm.entities.ClusterConfigEntity;
+import org.apache.ambari.server.orm.entities.RepositoryVersionEntity;
 import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.Clusters;
 import org.apache.ambari.server.state.Config;
+import org.apache.ambari.server.state.kerberos.AbstractKerberosDescriptor;
+import org.apache.ambari.server.state.kerberos.AbstractKerberosDescriptorContainer;
+import org.apache.ambari.server.state.kerberos.KerberosComponentDescriptor;
+import org.apache.ambari.server.state.kerberos.KerberosDescriptor;
+import org.apache.ambari.server.state.kerberos.KerberosDescriptorFactory;
+import org.apache.ambari.server.state.kerberos.KerberosIdentityDescriptor;
+import org.apache.ambari.server.state.kerberos.KerberosPrincipalDescriptor;
+import org.apache.ambari.server.state.kerberos.KerberosServiceDescriptor;
 import org.apache.commons.lang.StringUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -70,6 +85,7 @@ public class UpgradeCatalog260 extends AbstractUpgradeCatalog {
 
   public static final String REPO_VERSION_TABLE = "repo_version";
   public static final String REPO_VERSION_ID_COLUMN = "repo_version_id";
+  public static final String REPO_VERSION_RESOLVED_COLUMN = "resolved";
   public static final String REPO_VERSION_HIDDEN_COLUMN = "hidden";
 
   public static final String HOST_COMPONENT_DESIRED_STATE_TABLE = "hostcomponentdesiredstate";
@@ -97,6 +113,7 @@ public class UpgradeCatalog260 extends AbstractUpgradeCatalog {
   public static final String FK_UPGRADE_FROM_REPO_ID = "FK_upgrade_from_repo_id";
   public static final String FK_UPGRADE_TO_REPO_ID = "FK_upgrade_to_repo_id";
   public static final String FK_UPGRADE_REPO_VERSION_ID = "FK_upgrade_repo_version_id";
+  public static final String UPGRADE_ITEM_ITEM_TEXT = "item_text";
 
   public static final String SERVICE_COMPONENT_HISTORY_TABLE = "servicecomponent_history";
   public static final String UPGRADE_HISTORY_TABLE = "upgrade_history";
@@ -115,6 +132,9 @@ public class UpgradeCatalog260 extends AbstractUpgradeCatalog {
   public static final String HOST_COMPONENT_DESIRED_STATE = "hostcomponentdesiredstate";
   public static final String HOST_COMPONENT_STATE = "hostcomponentstate";
 
+  public static final String AMS_SSL_CLIENT = "ams-ssl-client";
+  public static final String METRIC_TRUSTSTORE_ALIAS = "ssl.client.truststore.alias";
+
   /**
    * Logger.
    */
@@ -123,6 +143,12 @@ public class UpgradeCatalog260 extends AbstractUpgradeCatalog {
   public static final String NOT_REQUIRED = "NOT_REQUIRED";
   public static final String CURRENT = "CURRENT";
   public static final String SELECTED = "1";
+  public static final String VIEWURL_TABLE = "viewurl";
+  public static final String PK_VIEWURL = "PK_viewurl";
+  public static final String URL_ID_COLUMN = "url_id";
+  public static final String STALE_POSTGRESS_VIEWURL_PKEY = "viewurl_pkey";
+  public static final String USERS_TABLE = "users";
+  public static final String STALE_POSTGRESS_USERS_LDAP_USER_KEY = "users_ldap_user_key";
 
 
   /**
@@ -168,6 +194,34 @@ public class UpgradeCatalog260 extends AbstractUpgradeCatalog {
     createUpgradeHistoryTable();
     updateRepositoryVersionTable();
     renameServiceDeletedColumn();
+    expandUpgradeItemItemTextColumn();
+    addViewUrlPKConstraint();
+    removeStaleConstraints();
+  }
+
+
+  /**
+   * Updates {@value #VIEWURL_TABLE} table.
+   * Adds the {@value #PK_VIEWURL} constraint.
+   */
+  private void addViewUrlPKConstraint() throws SQLException {
+    dbAccessor.dropPKConstraint(VIEWURL_TABLE, STALE_POSTGRESS_VIEWURL_PKEY);
+    dbAccessor.addPKConstraint(VIEWURL_TABLE, PK_VIEWURL, URL_ID_COLUMN);
+  }
+
+  /**
+   * Removes stale unnamed constraints.
+   */
+  private void removeStaleConstraints() throws SQLException {
+    dbAccessor.dropUniqueConstraint(USERS_TABLE, STALE_POSTGRESS_USERS_LDAP_USER_KEY);
+  }
+
+  /**
+   * Expands the item_text column of the upgrade_item table.
+   */
+  private void expandUpgradeItemItemTextColumn() throws SQLException {
+    dbAccessor.changeColumnType(UPGRADE_ITEM_TABLE, UPGRADE_ITEM_ITEM_TEXT,
+      String.class, char[].class);
   }
 
   private void renameServiceDeletedColumn() throws AmbariException, SQLException {
@@ -360,14 +414,20 @@ public class UpgradeCatalog260 extends AbstractUpgradeCatalog {
   }
 
   /**
-   * Updates {@value #REPO_VERSION_TABLE} table. Adds
-   * {@value #REPO_VERSION_HIDDEN_COLUMN} column.
+   * Updates {@value #REPO_VERSION_TABLE} table. Adds the following columns:
+   * <ul>
+   * <li>{@value #REPO_VERSION_HIDDEN_COLUMN}
+   * <li>{@value #REPO_VERSION_RESOLVED_COLUMN}
+   * </ul>
    *
    * @throws java.sql.SQLException
    */
   private void updateRepositoryVersionTable() throws SQLException {
     dbAccessor.addColumn(REPO_VERSION_TABLE,
         new DBAccessor.DBColumnInfo(REPO_VERSION_HIDDEN_COLUMN, Short.class, null, 0, false));
+
+    dbAccessor.addColumn(REPO_VERSION_TABLE,
+        new DBAccessor.DBColumnInfo(REPO_VERSION_RESOLVED_COLUMN, Short.class, null, 0, false));
   }
 
   /**
@@ -375,7 +435,7 @@ public class UpgradeCatalog260 extends AbstractUpgradeCatalog {
    */
   @Override
   protected void executePreDMLUpdates() throws AmbariException, SQLException {
-
+    removeSupersetFromDruid();
   }
 
   /**
@@ -385,8 +445,11 @@ public class UpgradeCatalog260 extends AbstractUpgradeCatalog {
   protected void executeDMLUpdates() throws AmbariException, SQLException {
     addNewConfigurationsFromXml();
     setUnmappedForOrphanedConfigs();
-    removeSupersetFromDruid();
     ensureZeppelinProxyUserConfigs();
+    updateKerberosDescriptorArtifacts();
+    updateAmsConfigs();
+    updateHDFSWidgetDefinition();
+    updateExistingRepositoriesToBeResolved();
   }
 
   public int getCurrentVersionID() throws AmbariException, SQLException {
@@ -495,4 +558,270 @@ public class UpgradeCatalog260 extends AbstractUpgradeCatalog {
       }
     }
   }
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  protected void updateKerberosDescriptorArtifact(ArtifactDAO artifactDAO, ArtifactEntity artifactEntity) throws AmbariException {
+    if (artifactEntity != null) {
+      Map<String, Object> data = artifactEntity.getArtifactData();
+      if (data != null) {
+        final KerberosDescriptor kerberosDescriptor = new KerberosDescriptorFactory().createInstance(data);
+        if (kerberosDescriptor != null) {
+          fixRangerKMSKerberosDescriptor(kerberosDescriptor);
+          fixIdentityReferences(getCluster(artifactEntity), kerberosDescriptor);
+
+          artifactEntity.setArtifactData(kerberosDescriptor.toMap());
+          artifactDAO.merge(artifactEntity);
+        }
+      }
+    }
+  }
+
+  protected void fixRangerKMSKerberosDescriptor(KerberosDescriptor kerberosDescriptor) {
+    KerberosServiceDescriptor rangerKmsServiceDescriptor = kerberosDescriptor.getService("RANGER_KMS");
+    if (rangerKmsServiceDescriptor != null) {
+
+      KerberosIdentityDescriptor rangerKmsServiceIdentity = rangerKmsServiceDescriptor.getIdentity("/smokeuser");
+      if (rangerKmsServiceIdentity != null) {
+        rangerKmsServiceDescriptor.removeIdentity("/smokeuser");
+      }
+      KerberosComponentDescriptor rangerKmsComponentDescriptor = rangerKmsServiceDescriptor.getComponent("RANGER_KMS_SERVER");
+      if (rangerKmsComponentDescriptor != null) {
+        KerberosIdentityDescriptor rangerKmsComponentIdentity = rangerKmsComponentDescriptor.getIdentity("/smokeuser");
+        if (rangerKmsComponentIdentity != null) {
+          rangerKmsComponentDescriptor.removeIdentity("/smokeuser");
+        }
+      }
+    }
+  }
+
+  protected void updateAmsConfigs() throws AmbariException {
+    AmbariManagementController ambariManagementController = injector.getInstance(AmbariManagementController.class);
+    Clusters clusters = ambariManagementController.getClusters();
+    if (clusters != null) {
+      Map<String, Cluster> clusterMap = getCheckedClusterMap(clusters);
+      if (clusterMap != null && !clusterMap.isEmpty()) {
+        for (final Cluster cluster : clusterMap.values()) {
+
+
+          Config amsSslClient = cluster.getDesiredConfigByType(AMS_SSL_CLIENT);
+          if (amsSslClient != null) {
+            Map<String, String> amsSslClientProperties = amsSslClient.getProperties();
+
+            if (amsSslClientProperties.containsKey(METRIC_TRUSTSTORE_ALIAS)) {
+              LOG.info("Removing " + METRIC_TRUSTSTORE_ALIAS + " from " + AMS_SSL_CLIENT);
+              removeConfigurationPropertiesFromCluster(cluster, AMS_SSL_CLIENT, Collections.singleton(METRIC_TRUSTSTORE_ALIAS));
+            }
+
+          }
+        }
+      }
+    }
+  }
+
+  protected void updateHDFSWidgetDefinition() throws AmbariException {
+    LOG.info("Updating HDFS widget definition.");
+
+    Map<String, List<String>> widgetMap = new HashMap<>();
+    Map<String, String> sectionLayoutMap = new HashMap<>();
+
+    List<String> hdfsHeatmapWidgets = new ArrayList<>(Arrays.asList("HDFS Bytes Read", "HDFS Bytes Written",
+      "DataNode Process Disk I/O Utilization", "DataNode Process Network I/O Utilization"));
+    widgetMap.put("HDFS_HEATMAPS", hdfsHeatmapWidgets);
+    sectionLayoutMap.put("HDFS_HEATMAPS", "default_hdfs_heatmap");
+
+    updateWidgetDefinitionsForService("HDFS", widgetMap, sectionLayoutMap);
+  }
+
+  /**
+   * Retrieves the relevant {@link Cluster} given information from the supplied {@link ArtifactEntity}.
+   * <p>
+   * The cluster id value is taken from the entity's foreign key value and then used to obtain the cluster object.
+   *
+   * @param artifactEntity an {@link ArtifactEntity}
+   * @return a {@link Cluster}
+   */
+  private Cluster getCluster(ArtifactEntity artifactEntity) {
+    if (artifactEntity != null) {
+      Map<String, String> keys = artifactEntity.getForeignKeys();
+      if (keys != null) {
+        String clusterId = keys.get("cluster");
+        if (StringUtils.isNumeric(clusterId)) {
+          Clusters clusters = injector.getInstance(Clusters.class);
+          try {
+            return clusters.getCluster(Long.valueOf(clusterId));
+          } catch (AmbariException e) {
+            LOG.error(String.format("Failed to obtain cluster using cluster id %s -  %s", clusterId, e.getMessage()), e);
+          }
+        } else {
+          LOG.error(String.format("Failed to obtain cluster id from artifact entity with foreign keys: %s", keys));
+        }
+      }
+    }
+
+    return null;
+  }
+
+  /**
+   * Recursively traverses the Kerberos descriptor to find and fix the identity references.
+   * <p>
+   * Each found identity descriptor that indicates it is a reference by having a <code>name</code>
+   * value that starts with a "/" or a "./" is fixed by clearing the <code>principal name</code> value,
+   * setting the <code>reference</code> value to the <code>name</code> value and changing the
+   * <code>name</code> value to a name with the following pattern:
+   * <code>SERVICE_COMPONENT_IDENTITY</code>
+   * <p>
+   * For example, if the identity is for the "SERVICE1" service and is a reference to "HDFS/NAMENODE/hdfs",
+   * then the name is set to "<code>service1_hdfs</code>".
+   * <p>
+   * For example, if the identity is for the "COMPONENT21" component of the "SERVICE2" service and is a reference to "HDFS/NAMENODE/hdfs",
+   * then the name is set to "<code>service2_component21_hdfs</code>".
+   * <p>
+   * Once the identity descriptor properties of the identity are fixed, the relevant configuration
+   * value is fixed to match the value of the referenced identity. This may lead to a new version
+   * of the relevant configuration type.
+   *
+   * @param cluster   the cluster
+   * @param container the current Kerberos descriptor container
+   * @throws AmbariException if an error occurs
+   */
+  private void fixIdentityReferences(Cluster cluster, AbstractKerberosDescriptorContainer container)
+      throws AmbariException {
+    List<KerberosIdentityDescriptor> identities = container.getIdentities();
+    if (identities != null) {
+      for (KerberosIdentityDescriptor identity : identities) {
+        String name = identity.getName();
+
+        if (!StringUtils.isEmpty(name) && (name.startsWith("/") || name.startsWith("./"))) {
+          String[] parts = name.split("/");
+          String newName = buildName(identity.getParent(), parts[parts.length - 1]);
+
+          identity.setName(newName);
+          identity.setReference(name);
+        }
+
+        String identityReference = identity.getReference();
+        if (!StringUtils.isEmpty(identityReference)) {
+          // If this identity references another identity:
+          //  * The principal name needs to be the same as the referenced identity
+          //    - ensure that no principal name is being set for this identity
+          //  * Any configuration set to contain the reference principal name needs to be fixed to
+          //    be the correct principal name
+          KerberosPrincipalDescriptor principal = identity.getPrincipalDescriptor();
+          if (principal != null) {
+            // Fix the value
+            principal.setValue(null);
+
+            // Fix the relative configuration
+            if (!StringUtils.isEmpty(principal.getConfiguration())) {
+              String referencedPrincipalName = getConfiguredPrincipalNameFromReference(cluster, container, identityReference);
+
+              if(!StringUtils.isEmpty(referencedPrincipalName)) {
+                String[] parts = principal.getConfiguration().split("/");
+                if (parts.length == 2) {
+                  String type = parts[0];
+                  String property = parts[1];
+
+                  updateConfigurationPropertiesForCluster(cluster,
+                      type,
+                      Collections.singletonMap(property, referencedPrincipalName),
+                      true,
+                      false);
+                }
+              }
+            }
+          }
+        }
+      }
+    }
+
+    if (container instanceof KerberosDescriptor) {
+      Map<String, KerberosServiceDescriptor> services = ((KerberosDescriptor) container).getServices();
+      if (services != null) {
+        for (KerberosServiceDescriptor serviceDescriptor : services.values()) {
+          fixIdentityReferences(cluster, serviceDescriptor);
+        }
+      }
+    } else if (container instanceof KerberosServiceDescriptor) {
+      Map<String, KerberosComponentDescriptor> components = ((KerberosServiceDescriptor) container).getComponents();
+      if (components != null) {
+        for (KerberosComponentDescriptor componentDescriptor : components.values()) {
+          fixIdentityReferences(cluster, componentDescriptor);
+        }
+      }
+    }
+  }
+
+  /**
+   * Finds the value of the configuration found for the principal in the referenced identity
+   * descriptor.
+   *
+   * @param cluster           the cluster
+   * @param container         the container of the current {@link KerberosIdentityDescriptor}, ideally the identity's parent descriptor
+   * @param identityReference the path to the referenced identity
+   * @return the value of the configuration specified in the referenced identity's principal descriptor
+   * @throws AmbariException if an error occurs
+   */
+  private String getConfiguredPrincipalNameFromReference(Cluster cluster,
+                                                         AbstractKerberosDescriptorContainer container,
+                                                         String identityReference)
+      throws AmbariException {
+    KerberosIdentityDescriptor identityDescriptor = container.getReferencedIdentityDescriptor(identityReference);
+
+    if (identityDescriptor != null) {
+      KerberosPrincipalDescriptor principal = identityDescriptor.getPrincipalDescriptor();
+      if ((principal != null) && (!StringUtils.isEmpty(principal.getConfiguration()))) {
+        String[] parts = principal.getConfiguration().split("/");
+        if (parts.length == 2) {
+          String type = parts[0];
+          String property = parts[1];
+
+          Config config = cluster.getDesiredConfigByType(type);
+
+          if (config != null) {
+            return config.getProperties().get(property);
+          }
+        }
+      }
+    }
+
+    return null;
+  }
+
+  /**
+   * Builds the name of an identity based on the identity's container and the referenced identity's name.
+   * <p>
+   * The calculated name will be in the following format and converted to all lowercase characters:
+   * <code>SERVICE_COMPONENT_IDENTITY</code>
+   *
+   * @param container    the container of the current {@link KerberosIdentityDescriptor}, ideally the identity's parent descriptor
+   * @param identityName the referenced identity's name
+   * @return a name
+   */
+  private String buildName(AbstractKerberosDescriptor container, String identityName) {
+    if (container instanceof KerberosServiceDescriptor) {
+      return container.getName().toLowerCase() + "_" + identityName;
+    } else if (container instanceof KerberosComponentDescriptor) {
+      return container.getParent().getName().toLowerCase() + "_" + container.getName().toLowerCase() + "_" + identityName;
+    } else {
+      return identityName;
+    }
+  }
+
+  /**
+   * Sets all existing repository versions to be resolved; we assume they are
+   * valid since they have already been used to run the cluster.
+   *
+   * @throws AmbariException
+   */
+  protected void updateExistingRepositoriesToBeResolved() throws AmbariException {
+    RepositoryVersionDAO repositoryVersionDAO = injector.getInstance(RepositoryVersionDAO.class);
+    List<RepositoryVersionEntity> repositoryVersions = repositoryVersionDAO.findAll();
+    for (RepositoryVersionEntity repositoryVersion : repositoryVersions) {
+      repositoryVersion.setResolved(true);
+      repositoryVersionDAO.merge(repositoryVersion);
+    }
+  }
 }
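
For reviewers, a minimal sketch of the identity renaming rule implemented by fixIdentityReferences() and buildName() above; the service, component, and identity values are hypothetical and taken from the javadoc examples:

    // Illustrative sketch only: mirrors the renaming rule from
    // fixIdentityReferences()/buildName() in the patch above.
    public class IdentityRenameSketch {
      public static void main(String[] args) {
        String service = "SERVICE2";           // hypothetical parent service
        String component = "COMPONENT21";      // hypothetical parent component
        String name = "/HDFS/NAMENODE/hdfs";   // reference-style identity name

        if (name.startsWith("/") || name.startsWith("./")) {
          String[] parts = name.split("/");
          String identityName = parts[parts.length - 1];  // "hdfs"
          // Component-level identities become SERVICE_COMPONENT_IDENTITY, lowercased.
          String newName = service.toLowerCase() + "_"
              + component.toLowerCase() + "_" + identityName;
          System.out.println(newName);   // prints "service2_component21_hdfs"
          // The original value ("/HDFS/NAMENODE/hdfs") is kept as the reference.
        }
      }
    }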

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/main/java/org/apache/ambari/server/utils/ManagedThreadPoolExecutor.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/utils/ManagedThreadPoolExecutor.java b/ambari-server/src/main/java/org/apache/ambari/server/utils/ManagedThreadPoolExecutor.java
new file mode 100644
index 0000000..3979c0e
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/utils/ManagedThreadPoolExecutor.java
@@ -0,0 +1,82 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.utils;
+
+import java.util.concurrent.BlockingQueue;
+import java.util.concurrent.Executors;
+import java.util.concurrent.ThreadPoolExecutor;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.locks.Condition;
+import java.util.concurrent.locks.ReentrantLock;
+
+/**
+ * A {@link ThreadPoolExecutor} extension that starts in a stopped state and can be explicitly started and stopped.
+ */
+public class ManagedThreadPoolExecutor extends ThreadPoolExecutor {
+
+  private volatile boolean isStopped;
+  private final ReentrantLock pauseLock = new ReentrantLock();
+  private final Condition unpaused = pauseLock.newCondition();
+
+  public ManagedThreadPoolExecutor(int corePoolSize, int maximumPoolSize,
+                            long keepAliveTime, TimeUnit unit,
+                            BlockingQueue<Runnable> workQueue) {
+    super(corePoolSize, maximumPoolSize, keepAliveTime, unit, workQueue,
+            Executors.defaultThreadFactory());
+    isStopped = true;
+  }
+
+  protected void beforeExecute(Thread t, Runnable r) {
+    super.beforeExecute(t, r);
+    pauseLock.lock();
+    try {
+      while (isStopped) {
+        unpaused.await();
+      }
+    } catch (InterruptedException ie) {
+      t.interrupt();
+    } finally {
+      pauseLock.unlock();
+    }
+  }
+
+  public void start() {
+    pauseLock.lock();
+    try {
+      isStopped = false;
+      unpaused.signalAll();
+    } finally {
+      pauseLock.unlock();
+    }
+  }
+
+  public void stop() {
+    pauseLock.lock();
+    try {
+      isStopped = true;
+    } finally {
+      pauseLock.unlock();
+    }
+  }
+
+  public boolean isRunning() {
+    return !isStopped;
+  }
+
+}
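
A minimal usage sketch of the new executor, assuming only the API added in this patch: tasks submitted before start() are accepted, but their worker threads block in beforeExecute() until start() signals the condition.

    import java.util.concurrent.Callable;
    import java.util.concurrent.Future;
    import java.util.concurrent.LinkedBlockingQueue;
    import java.util.concurrent.TimeUnit;

    import org.apache.ambari.server.utils.ManagedThreadPoolExecutor;

    // Sketch only; assumes ManagedThreadPoolExecutor exactly as added above.
    public class ManagedExecutorUsageSketch {
      public static void main(String[] args) throws Exception {
        ManagedThreadPoolExecutor executor = new ManagedThreadPoolExecutor(
            2, 2, 0L, TimeUnit.MILLISECONDS, new LinkedBlockingQueue<Runnable>());

        // The executor starts stopped: the task is accepted but not yet executed.
        Future<String> pending = executor.submit(new Callable<String>() {
          @Override
          public String call() {
            return "ran";
          }
        });

        executor.start();                   // releases workers blocked in beforeExecute()
        System.out.println(pending.get());  // completes only after start()
        executor.stop();                    // subsequently submitted tasks block again
        executor.shutdown();
      }
    }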

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/main/resources/Ambari-DDL-AzureDB-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-AzureDB-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-AzureDB-CREATE.sql
index 8f0cb67..7c004b3 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-AzureDB-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-AzureDB-CREATE.sql
@@ -222,6 +222,7 @@ CREATE TABLE repo_version (
   repositories VARCHAR(MAX) NOT NULL,
   repo_type VARCHAR(255) DEFAULT 'STANDARD' NOT NULL,
   hidden SMALLINT NOT NULL DEFAULT 0,
+  resolved BIT NOT NULL DEFAULT 0,
   version_url VARCHAR(1024),
   version_xml VARCHAR(MAX),
   version_xsd VARCHAR(512),
@@ -1169,7 +1170,7 @@ CREATE TABLE upgrade_item (
   state VARCHAR(255) DEFAULT 'NONE' NOT NULL,
   hosts TEXT,
   tasks TEXT,
-  item_text VARCHAR(1024),
+  item_text TEXT,
   CONSTRAINT PK_upgrade_item PRIMARY KEY CLUSTERED (upgrade_item_id),
   FOREIGN KEY (upgrade_group_id) REFERENCES upgrade_group(upgrade_group_id)
 )

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/main/resources/Ambari-DDL-Derby-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-Derby-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-Derby-CREATE.sql
index 5b03df5..28a7624 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-Derby-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-Derby-CREATE.sql
@@ -155,6 +155,7 @@ CREATE TABLE repo_version (
   repositories VARCHAR(3000) NOT NULL,
   repo_type VARCHAR(255) DEFAULT 'STANDARD' NOT NULL,
   hidden SMALLINT NOT NULL DEFAULT 0,
+  resolved SMALLINT NOT NULL DEFAULT 0,
   version_url VARCHAR(1024),
   version_xml CLOB,
   version_xsd VARCHAR(512),
@@ -844,7 +845,7 @@ CREATE TABLE upgrade_item (
   state VARCHAR(255) DEFAULT 'NONE' NOT NULL,
   hosts VARCHAR(3000),
   tasks VARCHAR(3000),
-  item_text VARCHAR(1024),
+  item_text VARCHAR(3000),
   CONSTRAINT PK_upgrade_item PRIMARY KEY (upgrade_item_id),
   FOREIGN KEY (upgrade_group_id) REFERENCES upgrade_group(upgrade_group_id)
 );

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
index 4d49dca..e7774e9 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
@@ -156,6 +156,7 @@ CREATE TABLE repo_version (
   repositories CLOB NOT NULL,
   repo_type VARCHAR2(255) DEFAULT 'STANDARD' NOT NULL,
   hidden NUMBER(1) DEFAULT 0 NOT NULL,
+  resolved NUMBER(1) DEFAULT 0 NOT NULL,
   version_url VARCHAR(1024),
   version_xml CLOB,
   version_xsd VARCHAR(512),
@@ -840,7 +841,7 @@ CREATE TABLE upgrade_item (
   state VARCHAR2(255) DEFAULT 'NONE' NOT NULL,
   hosts CLOB,
   tasks CLOB,
-  item_text VARCHAR2(1024),
+  item_text CLOB,
   CONSTRAINT PK_upgrade_item PRIMARY KEY (upgrade_item_id),
   FOREIGN KEY (upgrade_group_id) REFERENCES upgrade_group(upgrade_group_id)
 );

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
index 4d084e8..6e7c172 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
@@ -159,6 +159,7 @@ CREATE TABLE repo_version (
   version_xml TEXT,
   version_xsd VARCHAR(512),
   parent_id BIGINT,
+  resolved SMALLINT NOT NULL DEFAULT 0,
   CONSTRAINT PK_repo_version PRIMARY KEY (repo_version_id),
   CONSTRAINT FK_repoversion_stack_id FOREIGN KEY (stack_id) REFERENCES stack(stack_id),
   CONSTRAINT UQ_repo_version_display_name UNIQUE (display_name),
@@ -842,7 +843,7 @@ CREATE TABLE upgrade_item (
   state VARCHAR(255) DEFAULT 'NONE' NOT NULL,
   hosts TEXT,
   tasks TEXT,
-  item_text VARCHAR(1024),
+  item_text TEXT,
   CONSTRAINT PK_upgrade_item PRIMARY KEY (upgrade_item_id),
   FOREIGN KEY (upgrade_group_id) REFERENCES upgrade_group(upgrade_group_id)
 );

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/main/resources/Ambari-DDL-SQLAnywhere-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-SQLAnywhere-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-SQLAnywhere-CREATE.sql
index a19ca73..a64856d 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-SQLAnywhere-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-SQLAnywhere-CREATE.sql
@@ -154,6 +154,7 @@ CREATE TABLE repo_version (
   repositories TEXT NOT NULL,
   repo_type VARCHAR(255) DEFAULT 'STANDARD' NOT NULL,
   hidden SMALLINT NOT NULL DEFAULT 0,
+  resolved BIT NOT NULL DEFAULT 0,
   version_url VARCHAR(1024),
   version_xml TEXT,
   version_xsd VARCHAR(512),
@@ -838,7 +839,7 @@ CREATE TABLE upgrade_item (
   state VARCHAR(255) DEFAULT 'NONE' NOT NULL,
   hosts TEXT,
   tasks TEXT,
-  item_text VARCHAR(1024),
+  item_text TEXT,
   CONSTRAINT PK_upgrade_item PRIMARY KEY (upgrade_item_id),
   FOREIGN KEY (upgrade_group_id) REFERENCES upgrade_group(upgrade_group_id)
 );

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/main/resources/Ambari-DDL-SQLServer-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-SQLServer-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-SQLServer-CREATE.sql
index 96fd7fc..c0f36c0 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-SQLServer-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-SQLServer-CREATE.sql
@@ -168,6 +168,7 @@ CREATE TABLE repo_version (
   repositories VARCHAR(MAX) NOT NULL,
   repo_type VARCHAR(255) DEFAULT 'STANDARD' NOT NULL,
   hidden SMALLINT NOT NULL DEFAULT 0,
+  resolved BIT NOT NULL DEFAULT 0,
   version_url VARCHAR(1024),
   version_xml VARCHAR(MAX),
   version_xsd VARCHAR(512),
@@ -859,7 +860,7 @@ CREATE TABLE upgrade_item (
   state VARCHAR(255) DEFAULT 'NONE' NOT NULL,
   hosts TEXT,
   tasks TEXT,
-  item_text VARCHAR(1024),
+  item_text TEXT,
   CONSTRAINT PK_upgrade_item PRIMARY KEY CLUSTERED (upgrade_item_id),
   FOREIGN KEY (upgrade_group_id) REFERENCES upgrade_group(upgrade_group_id)
 );

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/TestActionDBAccessorImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/TestActionDBAccessorImpl.java b/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/TestActionDBAccessorImpl.java
index c449aae..94799cc 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/TestActionDBAccessorImpl.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/TestActionDBAccessorImpl.java
@@ -132,7 +132,7 @@ public class TestActionDBAccessorImpl {
   @Test
   public void testActionResponse() throws AmbariException {
     String hostname = "host1";
-    populateActionDB(db, hostname, requestId, stageId);
+    populateActionDB(db, hostname, requestId, stageId, false);
     Stage stage = db.getAllStages(requestId).get(0);
     Assert.assertEquals(stageId, stage.getStageId());
     stage.setHostRoleStatus(hostname, "HBASE_MASTER", HostRoleStatus.QUEUED);
@@ -160,7 +160,7 @@ public class TestActionDBAccessorImpl {
   @Test
   public void testCancelCommandReport() throws AmbariException {
     String hostname = "host1";
-    populateActionDB(db, hostname, requestId, stageId);
+    populateActionDB(db, hostname, requestId, stageId, false);
     Stage stage = db.getAllStages(requestId).get(0);
     Assert.assertEquals(stageId, stage.getStageId());
     stage.setHostRoleStatus(hostname, "HBASE_MASTER", HostRoleStatus.ABORTED);
@@ -191,8 +191,8 @@ public class TestActionDBAccessorImpl {
   @Test
   public void testGetStagesInProgress() throws AmbariException {
     List<Stage> stages = new ArrayList<>();
-    stages.add(createStubStage(hostName, requestId, stageId));
-    stages.add(createStubStage(hostName, requestId, stageId + 1));
+    stages.add(createStubStage(hostName, requestId, stageId, false));
+    stages.add(createStubStage(hostName, requestId, stageId + 1, false));
     Request request = new Request(stages, "", clusters);
     db.persistActions(request);
     assertEquals(2, stages.size());
@@ -200,8 +200,8 @@ public class TestActionDBAccessorImpl {
 
   @Test
   public void testGetStagesInProgressWithFailures() throws AmbariException {
-    populateActionDB(db, hostName, requestId, stageId);
-    populateActionDB(db, hostName, requestId + 1, stageId);
+    populateActionDB(db, hostName, requestId, stageId, false);
+    populateActionDB(db, hostName, requestId + 1, stageId, false);
     List<Stage> stages = db.getFirstStageInProgressPerRequest();
     assertEquals(2, stages.size());
 
@@ -289,7 +289,7 @@ public class TestActionDBAccessorImpl {
 
   @Test
   public void testPersistActions() throws AmbariException {
-    populateActionDB(db, hostName, requestId, stageId);
+    populateActionDB(db, hostName, requestId, stageId, false);
     for (Stage stage : db.getAllStages(requestId)) {
       log.info("taskId={}" + stage.getExecutionCommands(hostName).get(0).
           getExecutionCommand().getTaskId());
@@ -302,7 +302,7 @@ public class TestActionDBAccessorImpl {
 
   @Test
   public void testHostRoleScheduled() throws InterruptedException, AmbariException {
-    populateActionDB(db, hostName, requestId, stageId);
+    populateActionDB(db, hostName, requestId, stageId, false);
     Stage stage = db.getStage(StageUtils.getActionId(requestId, stageId));
     assertEquals(HostRoleStatus.PENDING, stage.getHostRoleStatus(hostName, Role.HBASE_MASTER.toString()));
     List<HostRoleCommandEntity> entities=
@@ -421,7 +421,7 @@ public class TestActionDBAccessorImpl {
 
   @Test
   public void testUpdateHostRole() throws Exception {
-    populateActionDB(db, hostName, requestId, stageId);
+    populateActionDB(db, hostName, requestId, stageId, false);
     StringBuilder sb = new StringBuilder();
     for (int i = 0; i < 50000; i++) {
       sb.append("1234567890");
@@ -452,13 +452,36 @@ public class TestActionDBAccessorImpl {
   }
 
   @Test
+  public void testUpdateHostRoleTimeoutRetry() throws Exception {
+    populateActionDB(db, hostName, requestId, stageId, true);
+
+    CommandReport commandReport = new CommandReport();
+    commandReport.setStatus(HostRoleStatus.TIMEDOUT.toString());
+    commandReport.setStdOut("");
+    commandReport.setStdErr("");
+    commandReport.setStructuredOut("");
+    commandReport.setExitCode(123);
+    db.updateHostRoleState(hostName, requestId, stageId, Role.HBASE_MASTER.toString(), commandReport);
+
+    List<HostRoleCommandEntity> commandEntities =
+      hostRoleCommandDAO.findByHostRole(hostName, requestId, stageId, Role.HBASE_MASTER.toString());
+
+    HostRoleCommandEntity commandEntity = commandEntities.get(0);
+    HostRoleCommand command = db.getTask(commandEntity.getTaskId());
+    assertNotNull(command);
+    assertEquals(HostRoleStatus.HOLDING_TIMEDOUT, command.getStatus());
+
+  }
+
+
+  @Test
   public void testGetRequestsByStatus() throws AmbariException {
     List<Long> requestIds = new ArrayList<>();
     requestIds.add(requestId + 1);
     requestIds.add(requestId);
-    populateActionDB(db, hostName, requestId, stageId);
+    populateActionDB(db, hostName, requestId, stageId, false);
     clusters.addHost("host2");
-    populateActionDB(db, hostName, requestId + 1, stageId);
+    populateActionDB(db, hostName, requestId + 1, stageId, false);
     List<Long> requestIdsResult =
       db.getRequestsByStatus(null, BaseRequest.DEFAULT_PAGE_SIZE, false);
 
@@ -508,7 +531,7 @@ public class TestActionDBAccessorImpl {
     }
 
     for (Long id : ids) {
-      populateActionDB(db, hostName, id, stageId);
+      populateActionDB(db, hostName, id, stageId, false);
     }
 
     List<Long> expected = null;
@@ -617,7 +640,7 @@ public class TestActionDBAccessorImpl {
   @Test
   public void testEntitiesCreatedWithIDs() throws Exception {
     List<Stage> stages = new ArrayList<>();
-    Stage stage = createStubStage(hostName, requestId, stageId);
+    Stage stage = createStubStage(hostName, requestId, stageId, false);
 
     stages.add(stage);
 
@@ -707,8 +730,8 @@ public class TestActionDBAccessorImpl {
   }
 
   private void populateActionDB(ActionDBAccessor db, String hostname,
-      long requestId, long stageId) throws AmbariException {
-    Stage s = createStubStage(hostname, requestId, stageId);
+      long requestId, long stageId, boolean retryAllowed) throws AmbariException {
+    Stage s = createStubStage(hostname, requestId, stageId, retryAllowed);
     List<Stage> stages = new ArrayList<>();
     stages.add(s);
     Request request = new Request(stages, "", clusters);
@@ -721,7 +744,7 @@ public class TestActionDBAccessorImpl {
 
     List<Stage> stages = new ArrayList<>();
     for (int i = 0; i < numberOfStages; i++) {
-      Stage stage = createStubStage(hostname, requestId, stageId + i);
+      Stage stage = createStubStage(hostname, requestId, stageId + i, false);
       stages.add(stage);
     }
 
@@ -732,7 +755,7 @@ public class TestActionDBAccessorImpl {
   private void populateActionDBWithCompletedRequest(ActionDBAccessor db, String hostname,
       long requestId, long stageId) throws AmbariException {
 
-    Stage s = createStubStage(hostname, requestId, stageId);
+    Stage s = createStubStage(hostname, requestId, stageId, false);
     List<Stage> stages = new ArrayList<>();
     stages.add(s);
     Request request = new Request(stages, "", clusters);
@@ -745,7 +768,7 @@ public class TestActionDBAccessorImpl {
   private void populateActionDBWithPartiallyCompletedRequest(ActionDBAccessor db, String hostname,
       long requestId, long stageId) throws AmbariException {
 
-    Stage s = createStubStage(hostname, requestId, stageId);
+    Stage s = createStubStage(hostname, requestId, stageId, false);
     List<Stage> stages = new ArrayList<>();
     stages.add(s);
 
@@ -756,14 +779,14 @@ public class TestActionDBAccessorImpl {
     db.persistActions(request);
   }
 
-  private Stage createStubStage(String hostname, long requestId, long stageId) {
+  private Stage createStubStage(String hostname, long requestId, long stageId, boolean retryAllowed) {
     Stage s = stageFactory.createNew(requestId, "/a/b", "cluster1", 1L, "action db accessor test",
       "commandParamsStage", "hostParamsStage");
     s.setStageId(stageId);
     s.addHostRoleExecutionCommand(hostname, Role.HBASE_MASTER,
         RoleCommand.START,
         new ServiceComponentHostStartEvent(Role.HBASE_MASTER.toString(),
-            hostname, System.currentTimeMillis()), "cluster1", "HBASE", false, false);
+            hostname, System.currentTimeMillis()), "cluster1", "HBASE", retryAllowed, false);
     s.addHostRoleExecutionCommand(
         hostname,
         Role.HBASE_REGIONSERVER,
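
The new testUpdateHostRoleTimeoutRetry pins down a status mapping: a TIMEDOUT report against a retry-allowed command is persisted as HOLDING_TIMEDOUT. A self-contained sketch of that rule as the test asserts it (the enum below is a stand-in, not Ambari's HostRoleStatus):

    // Illustrative sketch of the mapping asserted by testUpdateHostRoleTimeoutRetry.
    public class TimeoutRetrySketch {
      enum Status { TIMEDOUT, HOLDING_TIMEDOUT }   // stand-in for HostRoleStatus

      static Status persistedStatus(Status reported, boolean retryAllowed) {
        if (retryAllowed && reported == Status.TIMEDOUT) {
          return Status.HOLDING_TIMEDOUT;          // command holds for a retry decision
        }
        return reported;
      }

      public static void main(String[] args) {
        System.out.println(persistedStatus(Status.TIMEDOUT, true));   // HOLDING_TIMEDOUT
        System.out.println(persistedStatus(Status.TIMEDOUT, false));  // TIMEDOUT
      }
    }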

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/test/java/org/apache/ambari/server/agent/TestHeartbeatHandler.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/agent/TestHeartbeatHandler.java b/ambari-server/src/test/java/org/apache/ambari/server/agent/TestHeartbeatHandler.java
index 92b8429..71da8a9 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/agent/TestHeartbeatHandler.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/agent/TestHeartbeatHandler.java
@@ -30,6 +30,7 @@ import static org.apache.ambari.server.agent.DummyHeartbeatConstants.HDFS;
 import static org.apache.ambari.server.agent.DummyHeartbeatConstants.HDFS_CLIENT;
 import static org.apache.ambari.server.agent.DummyHeartbeatConstants.NAMENODE;
 import static org.apache.ambari.server.agent.DummyHeartbeatConstants.SECONDARY_NAMENODE;
+import static org.apache.ambari.server.controller.KerberosHelperImpl.SET_KEYTAB;
 import static org.easymock.EasyMock.expect;
 import static org.easymock.EasyMock.replay;
 import static org.easymock.EasyMock.reset;
@@ -1474,7 +1475,7 @@ public class TestHeartbeatHandler {
     ExecutionCommand executionCommand = new ExecutionCommand();
 
     Map<String, String> hlp = new HashMap<>();
-    hlp.put("custom_command", "SET_KEYTAB");
+    hlp.put("custom_command", SET_KEYTAB);
     executionCommand.setHostLevelParams(hlp);
 
     Map<String, String> commandparams = new HashMap<>();
@@ -1547,7 +1548,7 @@ public class TestHeartbeatHandler {
     kerberosIdentityDataFileWriter.writeRecord("c6403.ambari.apache.org", "HDFS", "DATANODE",
         "dn/_HOST@_REALM", "service",
         "/etc/security/keytabs/dn.service.keytab",
-        "hdfs", "r", "hadoop", "", "false");
+        "hdfs", "r", "hadoop", "", "false", "false");
 
     kerberosIdentityDataFileWriter.close();
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/test/java/org/apache/ambari/server/agent/TestHeartbeatMonitor.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/agent/TestHeartbeatMonitor.java b/ambari-server/src/test/java/org/apache/ambari/server/agent/TestHeartbeatMonitor.java
index b72dff2..3bd5fac 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/agent/TestHeartbeatMonitor.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/agent/TestHeartbeatMonitor.java
@@ -17,6 +17,8 @@
  */
 package org.apache.ambari.server.agent;
 
+import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.HOOKS_FOLDER;
+import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.SERVICE_PACKAGE_FOLDER;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
@@ -35,6 +37,7 @@ import org.apache.ambari.server.H2DatabaseCleaner;
 import org.apache.ambari.server.Role;
 import org.apache.ambari.server.actionmanager.ActionManager;
 import org.apache.ambari.server.api.services.AmbariMetaInfo;
+import org.apache.ambari.server.configuration.Configuration;
 import org.apache.ambari.server.orm.GuiceJpaInitializer;
 import org.apache.ambari.server.orm.InMemoryDefaultTestModule;
 import org.apache.ambari.server.orm.OrmTestHelper;
@@ -55,6 +58,8 @@ import org.apache.ambari.server.state.svccomphost.ServiceComponentHostDisableEve
 import org.apache.ambari.server.state.svccomphost.ServiceComponentHostInstallEvent;
 import org.apache.ambari.server.state.svccomphost.ServiceComponentHostOpSucceededEvent;
 import org.apache.ambari.server.state.svccomphost.ServiceComponentHostStartedEvent;
+import org.apache.ambari.server.topology.TopologyManager;
+import org.apache.ambari.server.utils.StageUtils;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
@@ -86,6 +91,8 @@ public class TestHeartbeatMonitor {
     injector.getInstance(GuiceJpaInitializer.class);
     helper = injector.getInstance(OrmTestHelper.class);
     ambariMetaInfo = injector.getInstance(AmbariMetaInfo.class);
+    StageUtils.setTopologyManager(injector.getInstance(TopologyManager.class));
+    StageUtils.setConfiguration(injector.getInstance(Configuration.class));
   }
 
   @After
@@ -197,6 +204,8 @@ public class TestHeartbeatMonitor {
     hb.setResponseId(12);
     handler.handleHeartBeat(hb);
 
+    hm.getAgentRequests().setExecutionDetailsRequest(hostname1, "DATANODE", Boolean.TRUE.toString());
+
     List<StatusCommand> cmds = hm.generateStatusCommands(hostname1);
     assertTrue("HeartbeatMonitor should generate StatusCommands for host1", cmds.size() == 3);
     assertEquals("HDFS", cmds.get(0).getServiceName());
@@ -205,10 +214,19 @@ public class TestHeartbeatMonitor {
     boolean  containsSECONDARY_NAMENODEStatus = false;
 
     for (StatusCommand cmd : cmds) {
-      containsDATANODEStatus |= cmd.getComponentName().equals("DATANODE");
+      boolean isDataNode = cmd.getComponentName().equals("DATANODE");
+      containsDATANODEStatus |= isDataNode;
       containsNAMENODEStatus |= cmd.getComponentName().equals("NAMENODE");
       containsSECONDARY_NAMENODEStatus |= cmd.getComponentName().equals("SECONDARY_NAMENODE");
       assertTrue(cmd.getConfigurations().size() > 0);
+
+      ExecutionCommand execCmd = cmd.getExecutionCommand();
+      assertEquals(isDataNode, execCmd != null);
+      if (execCmd != null) {
+        Map<String, String> commandParams = execCmd.getCommandParams();
+        assertTrue(SERVICE_PACKAGE_FOLDER + " should be included", commandParams.containsKey(SERVICE_PACKAGE_FOLDER));
+        assertTrue(HOOKS_FOLDER + " should be included", commandParams.containsKey(HOOKS_FOLDER));
+      }
     }
 
     assertEquals(true, containsDATANODEStatus);

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/test/java/org/apache/ambari/server/checks/DatabaseConsistencyCheckHelperTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/checks/DatabaseConsistencyCheckHelperTest.java b/ambari-server/src/test/java/org/apache/ambari/server/checks/DatabaseConsistencyCheckHelperTest.java
index ce7b783..3db174c 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/checks/DatabaseConsistencyCheckHelperTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/checks/DatabaseConsistencyCheckHelperTest.java
@@ -19,10 +19,14 @@ package org.apache.ambari.server.checks;
 
 
 import static com.google.common.collect.Lists.newArrayList;
+import static org.easymock.EasyMock.anyBoolean;
+import static org.easymock.EasyMock.anyLong;
+import static org.easymock.EasyMock.anyObject;
 import static org.easymock.EasyMock.anyString;
 import static org.easymock.EasyMock.createNiceMock;
 import static org.easymock.EasyMock.createStrictMock;
 import static org.easymock.EasyMock.expect;
+import static org.easymock.EasyMock.expectLastCall;
 import static org.easymock.EasyMock.replay;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
@@ -32,16 +36,21 @@ import java.sql.DatabaseMetaData;
 import java.sql.ResultSet;
 import java.sql.SQLException;
 import java.sql.Statement;
+import java.util.Arrays;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
 import javax.persistence.EntityManager;
+import javax.persistence.TypedQuery;
 
 import org.apache.ambari.server.api.services.AmbariMetaInfo;
 import org.apache.ambari.server.configuration.Configuration;
 import org.apache.ambari.server.orm.DBAccessor;
+import org.apache.ambari.server.orm.dao.ClusterDAO;
+import org.apache.ambari.server.orm.entities.ClusterConfigEntity;
 import org.apache.ambari.server.stack.StackManagerFactory;
+import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.Clusters;
 import org.apache.ambari.server.state.ServiceInfo;
 import org.apache.ambari.server.state.stack.OsFamily;
@@ -541,4 +550,79 @@ public class DatabaseConsistencyCheckHelperTest {
 
     easyMockSupport.verifyAll();
   }
+
+  @Test
+  public void testFixConfigsSelectedMoreThanOnce() throws Exception {
+    EasyMockSupport easyMockSupport = new EasyMockSupport();
+
+    final Connection mockConnection = easyMockSupport.createNiceMock(Connection.class);
+    final ClusterDAO clusterDAO = easyMockSupport.createNiceMock(ClusterDAO.class);
+    final DBAccessor mockDBDbAccessor = easyMockSupport.createNiceMock(DBAccessor.class);
+
+    final EntityManager mockEntityManager = easyMockSupport.createNiceMock(EntityManager.class);
+    final Clusters mockClusters = easyMockSupport.createNiceMock(Clusters.class);
+    final ResultSet mockResultSet = easyMockSupport.createNiceMock(ResultSet.class);
+    final Statement mockStatement = easyMockSupport.createNiceMock(Statement.class);
+
+    final StackManagerFactory mockStackManagerFactory = easyMockSupport.createNiceMock(StackManagerFactory.class);
+    final OsFamily mockOSFamily = easyMockSupport.createNiceMock(OsFamily.class);
+
+    final Injector mockInjector = Guice.createInjector(new AbstractModule() {
+      @Override
+      protected void configure() {
+        bind(EntityManager.class).toInstance(mockEntityManager);
+        bind(Clusters.class).toInstance(mockClusters);
+        bind(ClusterDAO.class).toInstance(clusterDAO);
+        bind(DBAccessor.class).toInstance(mockDBDbAccessor);
+        bind(StackManagerFactory.class).toInstance(mockStackManagerFactory);
+        bind(OsFamily.class).toInstance(mockOSFamily);
+      }
+    });
+
+
+    expect(mockConnection.createStatement(ResultSet.TYPE_SCROLL_SENSITIVE, ResultSet.CONCUR_UPDATABLE)).andReturn(mockStatement);
+    expect(mockStatement.executeQuery("select c.cluster_name, cc.type_name from clusterconfig cc " +
+        "join clusters c on cc.cluster_id=c.cluster_id " +
+        "group by c.cluster_name, cc.type_name " +
+        "having sum(cc.selected) > 1")).andReturn(mockResultSet);
+    expect(mockResultSet.next()).andReturn(true).once();
+    expect(mockResultSet.getString("cluster_name")).andReturn("123").once();
+    expect(mockResultSet.getString("type_name")).andReturn("type1").once();
+    expect(mockResultSet.next()).andReturn(false).once();
+
+    Cluster clusterMock = easyMockSupport.createNiceMock(Cluster.class);
+    expect(mockClusters.getCluster("123")).andReturn(clusterMock);
+
+    expect(clusterMock.getClusterId()).andReturn(123L).once();
+
+    ClusterConfigEntity clusterConfigEntity1 = easyMockSupport.createNiceMock(ClusterConfigEntity.class);
+    ClusterConfigEntity clusterConfigEntity2 = easyMockSupport.createNiceMock(ClusterConfigEntity.class);
+    expect(clusterConfigEntity1.getType()).andReturn("type1").anyTimes();
+    expect(clusterConfigEntity1.getSelectedTimestamp()).andReturn(123L);
+    clusterConfigEntity1.setSelected(false);
+    expectLastCall().once();
+
+    expect(clusterConfigEntity2.getType()).andReturn("type1").anyTimes();
+    expect(clusterConfigEntity2.getSelectedTimestamp()).andReturn(321L);
+    clusterConfigEntity2.setSelected(false);
+    expectLastCall().once();
+    clusterConfigEntity2.setSelected(true);
+    expectLastCall().once();
+
+    TypedQuery queryMock = easyMockSupport.createNiceMock(TypedQuery.class);
+    expect(mockEntityManager.createNamedQuery(anyString(), anyObject(Class.class))).andReturn(queryMock).anyTimes();
+    expect(queryMock.setParameter(anyString(), anyString())).andReturn(queryMock).once();
+    expect(queryMock.setParameter(anyString(), anyLong())).andReturn(queryMock).once();
+    expect(queryMock.getResultList()).andReturn(Arrays.asList(clusterConfigEntity1, clusterConfigEntity2)).once();
+    expect(clusterDAO.merge(anyObject(ClusterConfigEntity.class), anyBoolean())).andReturn(null).times(3);
+
+    DatabaseConsistencyCheckHelper.setInjector(mockInjector);
+    DatabaseConsistencyCheckHelper.setConnection(mockConnection);
+
+    easyMockSupport.replayAll();
+
+    DatabaseConsistencyCheckHelper.fixConfigsSelectedMoreThanOnce();
+
+    easyMockSupport.verifyAll();
+  }
 }
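
The new test encodes the repair rule for configs selected more than once: every duplicate is first deselected, and the one with the latest selected timestamp is re-selected. A minimal sketch of that rule, inferred from the mock expectations above (the Cfg type is an illustrative stand-in for ClusterConfigEntity):

    import java.util.Arrays;
    import java.util.List;

    // Illustrative sketch of the dedup rule the test above asserts:
    // deselect every duplicate config, then re-select the most recent one.
    public class SelectedConfigDedupSketch {

      static class Cfg {                      // stand-in for ClusterConfigEntity
        final long selectedTimestamp;
        boolean selected = true;
        Cfg(long ts) { this.selectedTimestamp = ts; }
      }

      public static void main(String[] args) {
        // Matches the mocks above: timestamps 123 and 321 for the same type.
        List<Cfg> duplicates = Arrays.asList(new Cfg(123L), new Cfg(321L));

        Cfg latest = duplicates.get(0);
        for (Cfg cfg : duplicates) {
          cfg.selected = false;               // setSelected(false) on every duplicate
          if (cfg.selectedTimestamp > latest.selectedTimestamp) {
            latest = cfg;
          }
        }
        latest.selected = true;               // only the ts=321 entity stays selected

        System.out.println(latest.selectedTimestamp);  // prints 321
      }
    }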

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/test/java/org/apache/ambari/server/checks/DruidHighAvailabilityCheckTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/checks/DruidHighAvailabilityCheckTest.java b/ambari-server/src/test/java/org/apache/ambari/server/checks/DruidHighAvailabilityCheckTest.java
new file mode 100644
index 0000000..d88c9a1
--- /dev/null
+++ b/ambari-server/src/test/java/org/apache/ambari/server/checks/DruidHighAvailabilityCheckTest.java
@@ -0,0 +1,176 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.server.checks;
+
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.ambari.server.api.services.AmbariMetaInfo;
+import org.apache.ambari.server.configuration.Configuration;
+import org.apache.ambari.server.controller.PrereqCheckRequest;
+import org.apache.ambari.server.orm.entities.RepositoryVersionEntity;
+import org.apache.ambari.server.state.Cluster;
+import org.apache.ambari.server.state.Clusters;
+import org.apache.ambari.server.state.Service;
+import org.apache.ambari.server.state.ServiceComponent;
+import org.apache.ambari.server.state.ServiceComponentHost;
+import org.apache.ambari.server.state.repository.ClusterVersionSummary;
+import org.apache.ambari.server.state.repository.VersionDefinitionXml;
+import org.apache.ambari.server.state.stack.PrereqCheckStatus;
+import org.apache.ambari.server.state.stack.PrerequisiteCheck;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.mockito.Mock;
+import org.mockito.Mockito;
+import org.mockito.runners.MockitoJUnitRunner;
+
+import com.google.common.collect.ImmutableMap;
+import com.google.inject.Provider;
+
+/**
+ * Unit tests for DruidHighAvailabilityCheck
+ *
+ */
+@RunWith(MockitoJUnitRunner.class)
+public class DruidHighAvailabilityCheckTest {
+
+  private final Clusters clusters = Mockito.mock(Clusters.class);
+
+  private final DruidHighAvailabilityCheck druidHighAvailabilityCheck = new DruidHighAvailabilityCheck();
+
+  @Mock
+  private ClusterVersionSummary m_clusterVersionSummary;
+
+  @Mock
+  private VersionDefinitionXml m_vdfXml;
+
+  @Mock
+  private RepositoryVersionEntity m_repositoryVersion;
+
+  final Map<String, Service> m_services = new HashMap<>();
+
+  @Before
+  public void setup() throws Exception {
+    druidHighAvailabilityCheck.clustersProvider = new Provider<Clusters>() {
+      @Override
+      public Clusters get() {
+        return clusters;
+      }
+    };
+
+    druidHighAvailabilityCheck.ambariMetaInfo = new Provider<AmbariMetaInfo>() {
+      @Override
+      public AmbariMetaInfo get() {
+        return Mockito.mock(AmbariMetaInfo.class);
+      }
+    };
+
+    Configuration config = Mockito.mock(Configuration.class);
+    druidHighAvailabilityCheck.config = config;
+
+    m_services.clear();
+    Mockito.when(m_repositoryVersion.getRepositoryXml()).thenReturn(m_vdfXml);
+    Mockito.when(m_vdfXml.getClusterSummary(Mockito.any(Cluster.class))).thenReturn(m_clusterVersionSummary);
+    Mockito.when(m_clusterVersionSummary.getAvailableServiceNames()).thenReturn(m_services.keySet());
+  }
+
+  @Test
+  public void testIsApplicable() throws Exception {
+    final Cluster cluster = Mockito.mock(Cluster.class);
+    final Service service = Mockito.mock(Service.class);
+
+    m_services.put("DRUID", service);
+
+    Mockito.when(cluster.getClusterId()).thenReturn(1L);
+    Mockito.when(cluster.getServices()).thenReturn(m_services);
+    Mockito.when(clusters.getCluster("cluster")).thenReturn(cluster);
+
+    PrereqCheckRequest request = new PrereqCheckRequest("cluster");
+    request.setTargetRepositoryVersion(m_repositoryVersion);
+
+    Assert.assertTrue(druidHighAvailabilityCheck.isApplicable(request));
+
+    request = new PrereqCheckRequest("cluster");
+    request.setTargetRepositoryVersion(m_repositoryVersion);
+
+    request.addResult(CheckDescription.DRUID_HA_WARNING, PrereqCheckStatus.PASS);
+    Assert.assertTrue(druidHighAvailabilityCheck.isApplicable(request));
+
+    m_services.remove("DRUID");
+    Assert.assertFalse(druidHighAvailabilityCheck.isApplicable(request));
+  }
+
+  @Test
+  public void testPerform() throws Exception {
+    final ServiceComponentHost serviceComponentHost = Mockito.mock(ServiceComponentHost.class);
+    final Cluster cluster = Mockito.mock(Cluster.class);
+    Mockito.when(cluster.getClusterId()).thenReturn(1L);
+    Mockito.when(clusters.getCluster("cluster")).thenReturn(cluster);
+
+    final Service service = Mockito.mock(Service.class);
+    final ServiceComponent serviceComponent = Mockito.mock(ServiceComponent.class);
+    final ServiceComponent haComponent = Mockito.mock(ServiceComponent.class);
+    Mockito.when(serviceComponent.getServiceComponentHosts()).thenReturn(Collections.singletonMap("host", null));
+    Mockito.when(haComponent.getServiceComponentHosts()).thenReturn(ImmutableMap.<String,ServiceComponentHost>of("host1", serviceComponentHost, "host2", serviceComponentHost));
+
+    // All Components Not HA
+    Mockito.when(cluster.getService("DRUID")).thenReturn(service);
+    Mockito.when(service.getServiceComponent("DRUID_COORDINATOR")).thenReturn(serviceComponent);
+    Mockito.when(service.getServiceComponent("DRUID_BROKER")).thenReturn(serviceComponent);
+    Mockito.when(service.getServiceComponent("DRUID_MIDDLEMANAGER")).thenReturn(serviceComponent);
+    Mockito.when(service.getServiceComponent("DRUID_HISTORICAL")).thenReturn(serviceComponent);
+    Mockito.when(service.getServiceComponent("DRUID_OVERLORD")).thenReturn(serviceComponent);
+    Mockito.when(service.getServiceComponent("DRUID_ROUTER")).thenReturn(serviceComponent);
+    PrerequisiteCheck check = new PrerequisiteCheck(null, null);
+    druidHighAvailabilityCheck.perform(check, new PrereqCheckRequest("cluster"));
+    Assert.assertEquals(PrereqCheckStatus.WARNING, check.getStatus());
+    Assert.assertEquals("DRUID", check.getFailedOn().toArray(new String[1])[0]);
+    Assert.assertEquals("High Availability is not enabled for Druid. Druid Service may have some downtime during upgrade. Deploy multiple instances of DRUID_BROKER, DRUID_COORDINATOR, DRUID_HISTORICAL, DRUID_OVERLORD, DRUID_MIDDLEMANAGER, DRUID_ROUTER in the Cluster to avoid any downtime.", check.getFailReason());
+
+    // Some Components have HA
+    Mockito.when(cluster.getService("DRUID")).thenReturn(service);
+    Mockito.when(service.getServiceComponent("DRUID_COORDINATOR")).thenReturn(serviceComponent);
+    Mockito.when(service.getServiceComponent("DRUID_BROKER")).thenReturn(haComponent);
+    Mockito.when(service.getServiceComponent("DRUID_MIDDLEMANAGER")).thenReturn(serviceComponent);
+    Mockito.when(service.getServiceComponent("DRUID_HISTORICAL")).thenReturn(haComponent);
+    Mockito.when(service.getServiceComponent("DRUID_OVERLORD")).thenReturn(serviceComponent);
+    Mockito.when(service.getServiceComponent("DRUID_ROUTER")).thenReturn(haComponent);
+    check = new PrerequisiteCheck(null, null);
+    druidHighAvailabilityCheck.perform(check, new PrereqCheckRequest("cluster"));
+    Assert.assertEquals(PrereqCheckStatus.WARNING, check.getStatus());
+    Assert.assertEquals("DRUID", check.getFailedOn().toArray(new String[1])[0]);
+    Assert.assertEquals("High Availability is not enabled for Druid. Druid Service may have some downtime during upgrade. Deploy multiple instances of DRUID_COORDINATOR, DRUID_OVERLORD, DRUID_MIDDLEMANAGER in the Cluster to avoid any downtime.", check.getFailReason());
+
+    // All components have HA
+    Mockito.when(cluster.getService("DRUID")).thenReturn(service);
+    Mockito.when(service.getServiceComponent("DRUID_COORDINATOR")).thenReturn(haComponent);
+    Mockito.when(service.getServiceComponent("DRUID_BROKER")).thenReturn(haComponent);
+    Mockito.when(service.getServiceComponent("DRUID_MIDDLEMANAGER")).thenReturn(haComponent);
+    Mockito.when(service.getServiceComponent("DRUID_HISTORICAL")).thenReturn(haComponent);
+    Mockito.when(service.getServiceComponent("DRUID_OVERLORD")).thenReturn(haComponent);
+    Mockito.when(service.getServiceComponent("DRUID_ROUTER")).thenReturn(haComponent);
+
+
+    check = new PrerequisiteCheck(null, null);
+    druidHighAvailabilityCheck.perform(check, new PrereqCheckRequest("cluster"));
+    Assert.assertEquals(PrereqCheckStatus.PASS, check.getStatus());
+  }
+}
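
For illustration only, a rough sketch of the per-component logic the assertions above imply: every Druid component deployed on fewer than two hosts is collected, and the joined list feeds the %s placeholder in the DRUID_HA_WARNING text. The helper name and the surrounding class are assumptions, not the committed implementation (component ordering matches the failure reasons the test asserts):

  static List<String> findSinglePointsOfFailure(Service druid) throws AmbariException {
    String[] components = { "DRUID_BROKER", "DRUID_COORDINATOR", "DRUID_HISTORICAL",
        "DRUID_OVERLORD", "DRUID_MIDDLEMANAGER", "DRUID_ROUTER" };
    List<String> nonHa = new ArrayList<>();
    for (String name : components) {
      ServiceComponent component = druid.getServiceComponent(name);
      if (component.getServiceComponentHosts().size() < 2) {
        nonHa.add(name); // a single host means downtime while that component restarts
      }
    }
    return nonHa; // an empty list means the check passes
  }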

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/test/java/org/apache/ambari/server/checks/ServiceCheckValidityCheckTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/checks/ServiceCheckValidityCheckTest.java b/ambari-server/src/test/java/org/apache/ambari/server/checks/ServiceCheckValidityCheckTest.java
index 55eeb4e..ff585fc 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/checks/ServiceCheckValidityCheckTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/checks/ServiceCheckValidityCheckTest.java
@@ -28,14 +28,12 @@ import java.util.Collections;
 
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.Role;
-import org.apache.ambari.server.RoleCommand;
 import org.apache.ambari.server.api.services.AmbariMetaInfo;
 import org.apache.ambari.server.controller.PrereqCheckRequest;
-import org.apache.ambari.server.controller.spi.Predicate;
-import org.apache.ambari.server.controller.spi.Request;
+import org.apache.ambari.server.metadata.ActionMetadata;
 import org.apache.ambari.server.orm.dao.HostRoleCommandDAO;
+import org.apache.ambari.server.orm.dao.HostRoleCommandDAO.LastServiceCheckDTO;
 import org.apache.ambari.server.orm.dao.ServiceConfigDAO;
-import org.apache.ambari.server.orm.entities.HostRoleCommandEntity;
 import org.apache.ambari.server.orm.entities.ServiceConfigEntity;
 import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.Clusters;
@@ -58,7 +56,6 @@ public class ServiceCheckValidityCheckTest {
   private static final long CLUSTER_ID = 1L;
   private static final String SERVICE_NAME = "HDFS";
   private static final long CONFIG_CREATE_TIMESTAMP = 1461518722202L;
-  private static final String COMMAND_DETAIL = "HDFS service check";
   private static final long SERVICE_CHECK_START_TIME = CONFIG_CREATE_TIMESTAMP - 2000L;
   private static final String SERVICE_COMPONENT_NAME = "service component";
   private ServiceCheckValidityCheck serviceCheckValidityCheck;
@@ -67,6 +64,7 @@ public class ServiceCheckValidityCheckTest {
   private HostRoleCommandDAO hostRoleCommandDAO;
   private Service service;
   private AmbariMetaInfo ambariMetaInfo;
+  private ActionMetadata actionMetadata;
 
   @Before
   public void setUp() throws Exception {
@@ -75,6 +73,7 @@ public class ServiceCheckValidityCheckTest {
     serviceConfigDAO = mock(ServiceConfigDAO.class);
     hostRoleCommandDAO = mock(HostRoleCommandDAO.class);
     ambariMetaInfo = mock(AmbariMetaInfo.class);
+    actionMetadata = new ActionMetadata();
 
     serviceCheckValidityCheck = new ServiceCheckValidityCheck();
     serviceCheckValidityCheck.hostRoleCommandDAOProvider = new Provider<HostRoleCommandDAO>() {
@@ -95,6 +94,12 @@ public class ServiceCheckValidityCheckTest {
         return clusters;
       }
     };
+    serviceCheckValidityCheck.actionMetadataProvider = new Provider<ActionMetadata>() {
+      @Override
+      public ActionMetadata get() {
+        return actionMetadata;
+      }
+    };
 
     Cluster cluster = mock(Cluster.class);
     when(clusters.getCluster(CLUSTER_NAME)).thenReturn(cluster);
@@ -114,6 +119,8 @@ public class ServiceCheckValidityCheckTest {
 
     when(ambariMetaInfo.isServiceWithNoConfigs(Mockito.anyString(), Mockito.anyString(),
         Mockito.anyString())).thenReturn(false);
+
+    actionMetadata.addServiceCheckAction("HDFS");
   }
 
   @Test
@@ -128,20 +135,11 @@ public class ServiceCheckValidityCheckTest {
     serviceConfigEntity.setServiceName(SERVICE_NAME);
     serviceConfigEntity.setCreateTimestamp(CONFIG_CREATE_TIMESTAMP);
 
-    HostRoleCommandEntity hostRoleCommandEntity1 = new HostRoleCommandEntity();
-    hostRoleCommandEntity1.setRoleCommand(RoleCommand.SERVICE_CHECK);
-    hostRoleCommandEntity1.setCommandDetail(null);
-    hostRoleCommandEntity1.setStartTime(SERVICE_CHECK_START_TIME);
-    hostRoleCommandEntity1.setRole(Role.ZOOKEEPER_SERVER);
-
-    HostRoleCommandEntity hostRoleCommandEntity2 = new HostRoleCommandEntity();
-    hostRoleCommandEntity2.setRoleCommand(RoleCommand.SERVICE_CHECK);
-    hostRoleCommandEntity2.setCommandDetail(COMMAND_DETAIL);
-    hostRoleCommandEntity2.setStartTime(SERVICE_CHECK_START_TIME);
-    hostRoleCommandEntity2.setRole(Role.HDFS_SERVICE_CHECK);
+    LastServiceCheckDTO lastServiceCheckDTO1 = new LastServiceCheckDTO(Role.ZOOKEEPER_QUORUM_SERVICE_CHECK.name(), SERVICE_CHECK_START_TIME);
+    LastServiceCheckDTO lastServiceCheckDTO2 = new LastServiceCheckDTO(Role.HDFS_SERVICE_CHECK.name(), SERVICE_CHECK_START_TIME);
 
     when(serviceConfigDAO.getLastServiceConfig(eq(CLUSTER_ID), eq(SERVICE_NAME))).thenReturn(serviceConfigEntity);
-    when(hostRoleCommandDAO.findAll(any(Request.class), any(Predicate.class))).thenReturn(asList(hostRoleCommandEntity1, hostRoleCommandEntity2));
+    when(hostRoleCommandDAO.getLatestServiceChecksByRole(any(Long.class))).thenReturn(asList(lastServiceCheckDTO1, lastServiceCheckDTO2));
 
     PrerequisiteCheck check = new PrerequisiteCheck(null, CLUSTER_NAME);
     try {
@@ -164,14 +162,10 @@ public class ServiceCheckValidityCheckTest {
     serviceConfigEntity.setServiceName(SERVICE_NAME);
     serviceConfigEntity.setCreateTimestamp(CONFIG_CREATE_TIMESTAMP);
 
-    HostRoleCommandEntity hostRoleCommandEntity = new HostRoleCommandEntity();
-    hostRoleCommandEntity.setRoleCommand(RoleCommand.SERVICE_CHECK);
-    hostRoleCommandEntity.setCommandDetail(COMMAND_DETAIL);
-    hostRoleCommandEntity.setStartTime(SERVICE_CHECK_START_TIME);
-    hostRoleCommandEntity.setRole(Role.HDFS_SERVICE_CHECK);
+    LastServiceCheckDTO lastServiceCheckDTO = new LastServiceCheckDTO(Role.HDFS_SERVICE_CHECK.name(), SERVICE_CHECK_START_TIME);
 
     when(serviceConfigDAO.getLastServiceConfig(eq(CLUSTER_ID), eq(SERVICE_NAME))).thenReturn(serviceConfigEntity);
-    when(hostRoleCommandDAO.findAll(any(Request.class), any(Predicate.class))).thenReturn(singletonList(hostRoleCommandEntity));
+    when(hostRoleCommandDAO.getLatestServiceChecksByRole(any(Long.class))).thenReturn(singletonList(lastServiceCheckDTO));
 
     PrerequisiteCheck check = new PrerequisiteCheck(null, CLUSTER_NAME);
     serviceCheckValidityCheck.perform(check, new PrereqCheckRequest(CLUSTER_NAME));
@@ -192,7 +186,7 @@ public class ServiceCheckValidityCheckTest {
     serviceConfigEntity.setCreateTimestamp(CONFIG_CREATE_TIMESTAMP);
 
     when(serviceConfigDAO.getLastServiceConfig(eq(CLUSTER_ID), eq(SERVICE_NAME))).thenReturn(serviceConfigEntity);
-    when(hostRoleCommandDAO.findAll(any(Request.class), any(Predicate.class))).thenReturn(Collections.emptyList());
+    when(hostRoleCommandDAO.getLatestServiceChecksByRole(any(Long.class))).thenReturn(Collections.<LastServiceCheckDTO>emptyList());
 
     PrerequisiteCheck check = new PrerequisiteCheck(null, CLUSTER_NAME);
     serviceCheckValidityCheck.perform(check, new PrereqCheckRequest(CLUSTER_NAME));
@@ -211,23 +205,49 @@ public class ServiceCheckValidityCheckTest {
     serviceConfigEntity.setServiceName(SERVICE_NAME);
     serviceConfigEntity.setCreateTimestamp(CONFIG_CREATE_TIMESTAMP);
 
-    HostRoleCommandEntity hostRoleCommandEntity1 = new HostRoleCommandEntity();
-    hostRoleCommandEntity1.setRoleCommand(RoleCommand.SERVICE_CHECK);
-    hostRoleCommandEntity1.setCommandDetail(COMMAND_DETAIL);
-    hostRoleCommandEntity1.setStartTime(SERVICE_CHECK_START_TIME);
-    hostRoleCommandEntity1.setRole(Role.HDFS_SERVICE_CHECK);
-
-    HostRoleCommandEntity hostRoleCommandEntity2 = new HostRoleCommandEntity();
-    hostRoleCommandEntity2.setRoleCommand(RoleCommand.SERVICE_CHECK);
-    hostRoleCommandEntity2.setCommandDetail(COMMAND_DETAIL);
-    hostRoleCommandEntity2.setStartTime(CONFIG_CREATE_TIMESTAMP - 1L);
-    hostRoleCommandEntity2.setRole(Role.HDFS_SERVICE_CHECK);
+    LastServiceCheckDTO lastServiceCheckDTO1 = new LastServiceCheckDTO(Role.HDFS_SERVICE_CHECK.name(), SERVICE_CHECK_START_TIME);
+    LastServiceCheckDTO lastServiceCheckDTO2 = new LastServiceCheckDTO(Role.HDFS_SERVICE_CHECK.name(), CONFIG_CREATE_TIMESTAMP - 1L);
 
     when(serviceConfigDAO.getLastServiceConfig(eq(CLUSTER_ID), eq(SERVICE_NAME))).thenReturn(serviceConfigEntity);
-    when(hostRoleCommandDAO.findAll(any(Request.class), any(Predicate.class))).thenReturn(asList(hostRoleCommandEntity1, hostRoleCommandEntity2));
+    when(hostRoleCommandDAO.getLatestServiceChecksByRole(any(Long.class))).thenReturn(asList(lastServiceCheckDTO1, lastServiceCheckDTO2));
 
     PrerequisiteCheck check = new PrerequisiteCheck(null, CLUSTER_NAME);
     serviceCheckValidityCheck.perform(check, new PrereqCheckRequest(CLUSTER_NAME));
     Assert.assertEquals(PrereqCheckStatus.FAIL, check.getStatus());
   }
+
+  /**
+   * Tests that old, outdated service checks for the FOO2 service don't cause
+   * problems when checking values for the FOO service.
+   * <p/>
+   * The specific test case here is that the FOO2 service was added a long time
+   * ago and then removed. We don't want old service checks for FOO2 to match
+   * when querying for FOO.
+   *
+   * @throws AmbariException
+   */
+  @Test
+  public void testPassWhenSimilarlyNamedServiceIsOutdated() throws AmbariException {
+    ServiceComponent serviceComponent = mock(ServiceComponent.class);
+    when(serviceComponent.isVersionAdvertised()).thenReturn(true);
+
+    when(service.getMaintenanceState()).thenReturn(MaintenanceState.OFF);
+    when(service.getServiceComponents()).thenReturn(ImmutableMap.of(SERVICE_COMPONENT_NAME, serviceComponent));
+
+    ServiceConfigEntity serviceConfigEntity = new ServiceConfigEntity();
+    serviceConfigEntity.setServiceName(SERVICE_NAME);
+    serviceConfigEntity.setCreateTimestamp(CONFIG_CREATE_TIMESTAMP);
+
+    String hdfsRole = Role.HDFS_SERVICE_CHECK.name();
+    String hdfs2Role = hdfsRole.replace("HDFS", "HDFS2");
+
+    LastServiceCheckDTO lastServiceCheckDTO1 = new LastServiceCheckDTO(hdfsRole, SERVICE_CHECK_START_TIME);
+    LastServiceCheckDTO lastServiceCheckDTO2 = new LastServiceCheckDTO(hdfs2Role, CONFIG_CREATE_TIMESTAMP - 1L);
+
+    when(serviceConfigDAO.getLastServiceConfig(eq(CLUSTER_ID), eq(SERVICE_NAME))).thenReturn(serviceConfigEntity);
+    when(hostRoleCommandDAO.getLatestServiceChecksByRole(any(Long.class))).thenReturn(asList(lastServiceCheckDTO1, lastServiceCheckDTO2));
+
+    PrerequisiteCheck check = new PrerequisiteCheck(null, CLUSTER_NAME);
+    serviceCheckValidityCheck.perform(check, new PrereqCheckRequest(CLUSTER_NAME));
+    Assert.assertEquals(PrereqCheckStatus.FAIL, check.getStatus());
+  }
 }
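
A self-contained sketch of the exact-role matching that lets the new test pass: role names are compared with equals(), so an old "HDFS2_SERVICE_CHECK" record can never shadow "HDFS_SERVICE_CHECK". The DTO below is a stand-in for HostRoleCommandDAO.LastServiceCheckDTO, and deriving the role as <SERVICE>_SERVICE_CHECK mirrors what ActionMetadata registers for HDFS in the setup above; all names here are illustrative.

  class ServiceCheckLookupSketch {
    static final class LastCheck {
      final String role;   // e.g. "HDFS_SERVICE_CHECK"
      final long endTime;  // timestamp of the most recent service check run
      LastCheck(String role, long endTime) { this.role = role; this.endTime = endTime; }
    }

    static Long lastServiceCheckTime(String serviceName, java.util.List<LastCheck> checks) {
      String expectedRole = serviceName + "_SERVICE_CHECK";
      for (LastCheck check : checks) {
        if (expectedRole.equals(check.role)) { // exact match, not a substring test
          return check.endTime;
        }
      }
      return null; // no service check has ever run for this service
    }
  }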

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
index 9309abe..b370829 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
@@ -6977,7 +6977,7 @@ public class AmbariManagementControllerTest {
     Assert.assertEquals(1, responsesWithParams.size());
     StackVersionResponse resp = responsesWithParams.iterator().next();
     assertNotNull(resp.getUpgradePacks());
-    assertEquals(15, resp.getUpgradePacks().size());
+    assertTrue(resp.getUpgradePacks().size() > 0);
     assertTrue(resp.getUpgradePacks().contains("upgrade_test"));
   }
 
@@ -9387,6 +9387,7 @@ public class AmbariManagementControllerTest {
     List<Long> requestIDs = actionDB.getRequestsByStatus(null, 1, false);
     Request request = actionDB.getRequest(requestIDs.get(0));
     assertEquals("Update Include/Exclude Files for [HDFS]", request.getRequestContext());
+    assertEquals(false, request.isExclusive());
     Type type = new TypeToken<Map<String, String>>(){}.getType();
     Map<String, String> requestParams = StageUtils.getGson().fromJson(request.getInputs(), type);
     assertEquals(2, requestParams.size());
@@ -10422,6 +10423,17 @@ public class AmbariManagementControllerTest {
     Assert.assertEquals("FILES_LOCAL", layoutUserWidgetEntities.get(2).getWidget().getWidgetName());
     Assert.assertEquals("UPDATED_BLOCKED_TIME", layoutUserWidgetEntities.get(3).getWidget().getWidgetName());
     Assert.assertEquals("HBASE_SUMMARY", layoutUserWidgetEntities.get(0).getWidget().getDefaultSectionName());
+
+    candidateLayoutEntity = null;
+    for (WidgetLayoutEntity entity : layoutEntities) {
+      if (entity.getLayoutName().equals("default_system_heatmap")) {
+        candidateLayoutEntity = entity;
+        break;
+      }
+    }
+    Assert.assertNotNull(candidateLayoutEntity);
+    Assert.assertEquals("ambari", candidateLayoutEntity.getAuthor());
+    Assert.assertEquals("CLUSTER", candidateLayoutEntity.getScope());
   }
 
   // this is a temporary measure as a result of moving updateHostComponents from AmbariManagementController

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/BlueprintConfigurationProcessorTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/BlueprintConfigurationProcessorTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/BlueprintConfigurationProcessorTest.java
index 734dd7e..68d6349 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/BlueprintConfigurationProcessorTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/BlueprintConfigurationProcessorTest.java
@@ -4997,6 +4997,7 @@ public class BlueprintConfigurationProcessorTest extends EasyMockSupport {
   @Test
   public void testAtlas() throws Exception {
     final String expectedHostGroupName = "host_group_1";
+    final String zkHostGroupName = "zk_host_group";
     final String host1 = "c6401.ambari.apache.org";
     final String host2 = "c6402.ambari.apache.org";
     final String host3 = "c6403.ambari.apache.org";
@@ -5015,18 +5016,24 @@ public class BlueprintConfigurationProcessorTest extends EasyMockSupport {
 
     Configuration clusterConfig = new Configuration(properties, Collections.emptyMap());
 
-    Collection<String> hgComponents = new HashSet<>();
-    hgComponents.add("KAFKA_BROKER");
-    hgComponents.add("ZOOKEEPER_SERVER");
-    hgComponents.add("HBASE_MASTER");
+    Collection<String> hg1Components = new HashSet<>();
+    hg1Components.add("KAFKA_BROKER");
+    hg1Components.add("HBASE_MASTER");
     List<String> hosts = new ArrayList<>();
     hosts.add(host1);
     hosts.add(host2);
-    hosts.add(host3);
-    TestHostGroup group1 = new TestHostGroup(expectedHostGroupName, hgComponents, hosts);
+    TestHostGroup group1 = new TestHostGroup(expectedHostGroupName, hg1Components, hosts);
+
+    // Place ZOOKEEPER_SERVER in separate host group/host other
+    // than ATLAS
+    Collection<String> zkHostGroupComponents = new HashSet<>();
+    zkHostGroupComponents.add("ZOOKEEPER_SERVER");
+
+    TestHostGroup group2 = new TestHostGroup(zkHostGroupName, zkHostGroupComponents, Collections.singletonList(host3));
 
     Collection<TestHostGroup> hostGroups = new HashSet<>();
     hostGroups.add(group1);
+    hostGroups.add(group2);
 
     ClusterTopology topology = createClusterTopology(bp, clusterConfig, hostGroups);
     BlueprintConfigurationProcessor updater = new BlueprintConfigurationProcessor(topology);
@@ -5037,29 +5044,29 @@ public class BlueprintConfigurationProcessorTest extends EasyMockSupport {
     List<String> hostArray =
       Arrays.asList(atlasProperties.get("atlas.kafka.bootstrap.servers").split(","));
     List<String> expected =
-      Arrays.asList("c6401.ambari.apache.org:6667", "c6402.ambari.apache.org:6667", "c6403.ambari.apache.org:6667");
+      Arrays.asList("c6401.ambari.apache.org:6667", "c6402.ambari.apache.org:6667");
 
     Assert.assertTrue(hostArray.containsAll(expected) && expected.containsAll(hostArray));
 
     hostArray = Arrays.asList(atlasProperties.get("atlas.kafka.zookeeper.connect").split(","));
     expected =
-      Arrays.asList("c6401.ambari.apache.org:2181", "c6402.ambari.apache.org:2181", "c6403.ambari.apache.org:2181");
+      Arrays.asList("c6403.ambari.apache.org:2181");
     Assert.assertTrue(hostArray.containsAll(expected) && expected.containsAll(hostArray));
 
 
     hostArray = Arrays.asList(atlasProperties.get("atlas.graph.index.search.solr.zookeeper-url").split(","));
     expected =
-      Arrays.asList("c6401.ambari.apache.org:2181/ambari-solr", "c6402.ambari.apache.org:2181/ambari-solr", "c6403.ambari.apache.org:2181/ambari-solr");
+      Arrays.asList("c6403.ambari.apache.org:2181/ambari-solr");
     Assert.assertTrue(hostArray.containsAll(expected) && expected.containsAll(hostArray));
 
     hostArray = Arrays.asList(atlasProperties.get("atlas.graph.storage.hostname").split(","));
     expected =
-      Arrays.asList("c6401.ambari.apache.org", "c6402.ambari.apache.org", "c6403.ambari.apache.org");
+      Arrays.asList("c6403.ambari.apache.org");
     Assert.assertTrue(hostArray.containsAll(expected) && expected.containsAll(hostArray));
 
     hostArray = Arrays.asList(atlasProperties.get("atlas.audit.hbase.zookeeper.quorum").split(","));
     expected =
-      Arrays.asList("c6401.ambari.apache.org", "c6402.ambari.apache.org", "c6403.ambari.apache.org");
+      Arrays.asList("c6403.ambari.apache.org");
     Assert.assertTrue(hostArray.containsAll(expected) && expected.containsAll(hostArray));
   }
 


[7/7] ambari git commit: AMBARI-22190. After merging trunk to branch-3.0-perf some parts of code are missing. (mpapirkovskyy)

Posted by mp...@apache.org.
AMBARI-22190. After merging trunk to branch-3.0-perf some parts of code are missing. (mpapirkovskyy)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/5ae98dbe
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/5ae98dbe
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/5ae98dbe

Branch: refs/heads/branch-3.0-perf
Commit: 5ae98dbeb1ad7752d02235a61d2869be2e284f55
Parents: 3051fa9
Author: Myroslav Papirkovskyi <mp...@hortonworks.com>
Authored: Tue Oct 10 18:37:05 2017 +0300
Committer: Myroslav Papirkovskyi <mp...@hortonworks.com>
Committed: Tue Oct 10 18:37:06 2017 +0300

----------------------------------------------------------------------
 ambari-server/pom.xml                           |   6 +
 .../actionmanager/ActionDBAccessorImpl.java     |  11 +
 .../server/actionmanager/ActionScheduler.java   |  10 +-
 .../actionmanager/ExecutionCommandWrapper.java  | 121 +++---
 .../ambari/server/agent/CommandRepository.java  |  51 ++-
 .../resources/ClusterResourceDefinition.java    |   1 +
 .../server/api/services/AmbariMetaInfo.java     |   2 +-
 .../ambari/server/checks/CheckDescription.java  |  14 +-
 .../checks/DatabaseConsistencyCheckHelper.java  | 128 +++++-
 .../checks/DruidHighAvailabilityCheck.java      | 121 ++++++
 .../checks/ServiceCheckValidityCheck.java       |  84 ++--
 .../AmbariCustomCommandExecutionHelper.java     |  16 +-
 .../AmbariManagementControllerImpl.java         |  39 +-
 .../server/controller/ConfigGroupRequest.java   |  14 +-
 .../server/controller/ControllerModule.java     |   2 +
 .../controller/DeleteIdentityHandler.java       |   3 +-
 .../server/controller/KerberosHelper.java       |   8 +-
 .../server/controller/KerberosHelperImpl.java   |  13 +-
 .../server/controller/RepositoryResponse.java   |  23 +-
 .../controller/ResourceProviderFactory.java     |   4 +
 .../AbstractControllerResourceProvider.java     |   2 +-
 .../internal/AbstractProviderModule.java        |  15 +-
 .../BlueprintConfigurationProcessor.java        |  16 +-
 .../internal/ClientConfigResourceProvider.java  |   2 +
 .../ClusterStackVersionResourceProvider.java    |  21 +-
 .../internal/ComponentResourceProvider.java     |   4 +
 .../internal/ConfigGroupResourceProvider.java   |   8 +-
 .../internal/DefaultProviderModule.java         |   2 -
 .../internal/RepositoryResourceProvider.java    |   6 +
 .../RepositoryVersionResourceProvider.java      |   6 +-
 .../internal/ServiceResourceProvider.java       |   8 +
 .../internal/UpgradeResourceProvider.java       |  24 +-
 .../VersionDefinitionResourceProvider.java      |   4 +
 .../server/events/AlertReceivedEvent.java       |   8 +-
 .../events/ClusterConfigFinishedEvent.java      |  15 +-
 .../DistributeRepositoriesActionListener.java   |   8 +
 .../listeners/upgrade/StackVersionListener.java |  12 +-
 .../CachedRoleCommandOrderProvider.java         |   2 +
 .../ambari/server/orm/DBAccessorImpl.java       |   6 +-
 .../server/orm/dao/AlertDefinitionDAO.java      |  13 +
 .../ambari/server/orm/dao/ClusterDAO.java       |  22 +-
 .../server/orm/dao/HostRoleCommandDAO.java      |  47 +++
 .../ambari/server/orm/dao/ServiceConfigDAO.java |   2 +-
 .../orm/entities/AlertDefinitionEntity.java     |   4 +-
 .../orm/entities/HostRoleCommandEntity.java     |  64 ++-
 .../server/orm/entities/RepositoryEntity.java   |  22 +
 .../orm/entities/RepositoryVersionEntity.java   |  37 ++
 .../server/orm/entities/UpgradeItemEntity.java  |   2 +-
 .../scheduler/ExecutionScheduleManager.java     |  27 +-
 .../AbstractPrepareKerberosServerAction.java    |  14 +-
 .../kerberos/CreateKeytabFilesServerAction.java |   9 +-
 .../kerberos/KerberosIdentityDataFile.java      |   2 +-
 .../KerberosIdentityDataFileWriter.java         |   9 +-
 .../PrepareDisableKerberosServerAction.java     |   2 +-
 .../PrepareEnableKerberosServerAction.java      |   2 +-
 .../PrepareKerberosIdentitiesServerAction.java  |   3 +-
 .../upgrades/FinalizeUpgradeAction.java         |   5 +
 .../upgrades/PreconfigureKerberosAction.java    |   6 +-
 .../RangerUsersyncConfigCalculation.java        |  96 +++++
 .../apache/ambari/server/stack/RepoUtil.java    |   4 +-
 .../ambari/server/stack/StackContext.java       | 118 ++++--
 .../apache/ambari/server/stack/StackModule.java |  48 ++-
 .../server/stack/StackServiceDirectory.java     |   7 +-
 .../ambari/server/state/RepositoryInfo.java     |  27 +-
 .../server/state/ServiceComponentImpl.java      |  25 ++
 .../ambari/server/state/ServiceOsSpecific.java  |  16 +
 .../apache/ambari/server/state/StackInfo.java   |   1 -
 .../ambari/server/state/UpgradeHelper.java      | 109 ++++-
 .../server/state/cluster/ClusterImpl.java       |   4 +-
 .../kerberos/AbstractKerberosDescriptor.java    |   4 +-
 .../AbstractKerberosDescriptorContainer.java    |  25 +-
 .../kerberos/KerberosComponentDescriptor.java   |   2 +-
 .../state/kerberos/KerberosDescriptor.java      |  15 +-
 .../kerberos/KerberosIdentityDescriptor.java    |  78 +---
 .../kerberos/KerberosKeytabDescriptor.java      |  42 +-
 .../kerberos/KerberosPrincipalDescriptor.java   |  25 +-
 .../kerberos/KerberosServiceDescriptor.java     |  12 +-
 .../services/AmbariServerAlertService.java      |  21 +-
 .../server/state/stack/RepoUrlInfoCallable.java | 217 ++++++++++
 .../server/state/stack/RepoVdfCallable.java     | 161 ++++++++
 .../server/state/stack/RepositoryXml.java       |  11 +
 .../state/stack/upgrade/ConfigureTask.java      |   3 +
 .../stack/upgrade/RepositoryVersionHelper.java  |  11 +
 .../ambari/server/topology/AmbariContext.java   |   6 +-
 .../ambari/server/topology/TopologyManager.java |  96 +++--
 .../topology/tasks/ConfigureClusterTask.java    |  13 +-
 .../tasks/ConfigureClusterTaskFactory.java      |   3 +-
 .../server/upgrade/UpgradeCatalog260.java       | 345 +++++++++++++++-
 .../server/utils/ManagedThreadPoolExecutor.java |  82 ++++
 .../resources/Ambari-DDL-AzureDB-CREATE.sql     |   3 +-
 .../main/resources/Ambari-DDL-Derby-CREATE.sql  |   3 +-
 .../main/resources/Ambari-DDL-Oracle-CREATE.sql |   3 +-
 .../resources/Ambari-DDL-Postgres-CREATE.sql    |   3 +-
 .../resources/Ambari-DDL-SQLAnywhere-CREATE.sql |   3 +-
 .../resources/Ambari-DDL-SQLServer-CREATE.sql   |   3 +-
 .../actionmanager/TestActionDBAccessorImpl.java |  63 ++-
 .../server/agent/TestHeartbeatHandler.java      |   5 +-
 .../server/agent/TestHeartbeatMonitor.java      |  20 +-
 .../DatabaseConsistencyCheckHelperTest.java     |  84 ++++
 .../checks/DruidHighAvailabilityCheckTest.java  | 176 ++++++++
 .../checks/ServiceCheckValidityCheckTest.java   |  92 +++--
 .../AmbariManagementControllerTest.java         |  14 +-
 .../BlueprintConfigurationProcessorTest.java    |  29 +-
 ...ClusterStackVersionResourceProviderTest.java | 232 ++++-------
 .../internal/ComponentResourceProviderTest.java |   6 +
 .../RepositoryResourceProviderTest.java         |  20 +-
 .../StackUpgradeConfigurationMergeTest.java     | 136 +++++++
 .../internal/UpgradeResourceProviderTest.java   | 108 +++++
 .../upgrade/StackVersionListenerTest.java       |  46 +++
 .../scheduler/ExecutionScheduleManagerTest.java |  43 +-
 ...AbstractPrepareKerberosServerActionTest.java |   2 +-
 .../kerberos/KerberosIdentityDataFileTest.java  |  10 +-
 .../kerberos/KerberosServerActionTest.java      |   2 +-
 .../ComponentVersionCheckActionTest.java        |  14 -
 .../RangerUsersyncConfigCalculationTest.java    | 126 ++++++
 .../server/stack/KerberosDescriptorTest.java    | 164 +++-----
 .../server/stack/StackServiceDirectoryTest.java |  76 ++++
 .../server/state/ServiceComponentTest.java      |  21 +-
 .../ambari/server/state/UpgradeHelperTest.java  |  44 ++
 .../KerberosComponentDescriptorTest.java        |  20 +-
 .../KerberosConfigurationDescriptorTest.java    |   4 +-
 .../state/kerberos/KerberosDescriptorTest.java  |  54 +--
 .../KerberosDescriptorUpdateHelperTest.java     |   4 +-
 .../KerberosIdentityDescriptorTest.java         |  51 ++-
 .../kerberos/KerberosKeytabDescriptorTest.java  |  30 +-
 .../KerberosPrincipalDescriptorTest.java        |  26 +-
 .../kerberos/KerberosServiceDescriptorTest.java |  26 +-
 .../server/state/stack/UpgradePackTest.java     |   1 +
 .../ClusterDeployWithStartOnlyTest.java         |   2 +-
 ...InstallWithoutStartOnComponentLevelTest.java |   2 +-
 .../ClusterInstallWithoutStartTest.java         |   2 +-
 .../topology/ConfigureClusterTaskTest.java      |  18 +-
 .../server/topology/TopologyManagerTest.java    |   2 +-
 .../server/upgrade/UpgradeCatalog260Test.java   | 398 +++++++++++++++----
 .../utils/ManagedThreadPoolExecutorTest.java    |  51 +++
 135 files changed, 3948 insertions(+), 1053 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/pom.xml
----------------------------------------------------------------------
diff --git a/ambari-server/pom.xml b/ambari-server/pom.xml
index de6ff51..bb0fa0a 100644
--- a/ambari-server/pom.xml
+++ b/ambari-server/pom.xml
@@ -1769,6 +1769,12 @@
       <artifactId>jna</artifactId>
       <version>4.2.2</version>
     </dependency>
+    <dependency>
+      <groupId>com.networknt</groupId>
+      <artifactId>json-schema-validator</artifactId>
+      <version>0.1.10</version>
+      <scope>test</scope>
+    </dependency>
   </dependencies>
 
   <pluginRepositories>

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ActionDBAccessorImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ActionDBAccessorImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ActionDBAccessorImpl.java
index 555e2ad..f118c92 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ActionDBAccessorImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ActionDBAccessorImpl.java
@@ -552,6 +552,12 @@ public class ActionDBAccessorImpl implements ActionDBAccessor {
             reportedTaskStatus = HostRoleStatus.SKIPPED_FAILED;
           }
         }
+
+        // if TIMEDOUT and marked for holding then set status = HOLDING_TIMEDOUT
+        if (reportedTaskStatus == HostRoleStatus.TIMEDOUT && commandEntity.isRetryAllowed()){
+          reportedTaskStatus = HostRoleStatus.HOLDING_TIMEDOUT;
+        }
+
         if (!existingTaskStatus.isCompletedState()) {
           commandEntity.setStatus(reportedTaskStatus);
         }
@@ -615,6 +621,11 @@ public class ActionDBAccessorImpl implements ActionDBAccessor {
         }
       }
 
+      // if TIMEDOUT and marked for holding then set status = HOLDING_TIMEDOUT
+      if (status == HostRoleStatus.TIMEDOUT && command.isRetryAllowed()){
+        status = HostRoleStatus.HOLDING_TIMEDOUT;
+      }
+
       command.setStatus(status);
       command.setStdOut(report.getStdOut().getBytes());
       command.setStdError(report.getStdErr().getBytes());
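
Both hunks above apply the same translation; a minimal sketch of it as a standalone helper (the method name is hypothetical, the enum values are the real HostRoleStatus ones):

  static HostRoleStatus adjustForRetry(HostRoleStatus reported, boolean retryAllowed) {
    // a timed-out task that still allows retry is parked in HOLDING_TIMEDOUT
    // so an operator can resume it instead of the request failing outright
    if (reported == HostRoleStatus.TIMEDOUT && retryAllowed) {
      return HostRoleStatus.HOLDING_TIMEDOUT;
    }
    return reported;
  }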

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ActionScheduler.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ActionScheduler.java b/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ActionScheduler.java
index c41dd01..5c830ba 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ActionScheduler.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ActionScheduler.java
@@ -927,12 +927,14 @@ class ActionScheduler implements Runnable {
       RoleCommandPair roleCommand = new
               RoleCommandPair(Role.valueOf(command.getRole()), command.getRoleCommand());
       Set<RoleCommandPair> roleCommandDependencies = rco.getDependencies().get(roleCommand);
-      // remove eventual references to the same RoleCommand
-      roleCommandDependencies.remove(roleCommand);
 
       // check if there are any dependencies IN_PROGRESS
-      if (roleCommandDependencies != null && CollectionUtils.containsAny(rolesCommandsInProgress, roleCommandDependencies)) {
-        areCommandDependenciesFinished = false;
+      if (roleCommandDependencies != null) {
+        // remove eventual references to the same RoleCommand
+        roleCommandDependencies.remove(roleCommand);
+        if (CollectionUtils.containsAny(rolesCommandsInProgress, roleCommandDependencies)) {
+          areCommandDependenciesFinished = false;
+        }
       }
     }
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ExecutionCommandWrapper.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ExecutionCommandWrapper.java b/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ExecutionCommandWrapper.java
index 903619e..79ca5ba 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ExecutionCommandWrapper.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ExecutionCommandWrapper.java
@@ -19,6 +19,7 @@ package org.apache.ambari.server.actionmanager;
 
 import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.HOOKS_FOLDER;
 import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.SERVICE_PACKAGE_FOLDER;
+import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.VERSION;
 
 import java.util.Map;
 import java.util.TreeMap;
@@ -29,7 +30,6 @@ import org.apache.ambari.server.RoleCommand;
 import org.apache.ambari.server.ServiceNotFoundException;
 import org.apache.ambari.server.agent.AgentCommand.AgentCommandType;
 import org.apache.ambari.server.agent.ExecutionCommand;
-import org.apache.ambari.server.agent.ExecutionCommand.KeyNames;
 import org.apache.ambari.server.api.services.AmbariMetaInfo;
 import org.apache.ambari.server.orm.dao.HostRoleCommandDAO;
 import org.apache.ambari.server.orm.entities.RepositoryVersionEntity;
@@ -167,88 +167,93 @@ public class ExecutionCommandWrapper {
       configHelper.getAndMergeHostConfigAttributes(executionCommand.getConfigurationAttributes(),
           configurationTags, cluster);
 
-      // set the repository version for the component this command is for -
-      // always use the current desired version
-      try {
-        RepositoryVersionEntity repositoryVersion = null;
-        String serviceName = executionCommand.getServiceName();
-        if (!StringUtils.isEmpty(serviceName)) {
-          Service service = cluster.getService(serviceName);
-          if (null != service) {
-            repositoryVersion = service.getDesiredRepositoryVersion();
-          }
+      setVersions(cluster);
+
+      // provide some basic information about a cluster upgrade if there is one
+      // in progress
+      UpgradeEntity upgrade = cluster.getUpgradeInProgress();
+      if (null != upgrade) {
+        UpgradeContext upgradeContext = upgradeContextFactory.create(cluster, upgrade);
+        UpgradeSummary upgradeSummary = upgradeContext.getUpgradeSummary();
+        executionCommand.setUpgradeSummary(upgradeSummary);
+      }
+
+    } catch (ClusterNotFoundException cnfe) {
+      // it's possible that there are commands without clusters; in such cases,
+      // just return the de-serialized command and don't try to read configs
+      LOG.warn(
+          "Unable to lookup the cluster by ID; assuming that there is no cluster and therefore no configs for this execution command: {}",
+          cnfe.getMessage());
+
+      return executionCommand;
+    } catch (AmbariException e) {
+      throw new RuntimeException(e);
+    }
+
+    return executionCommand;
+  }
+
+  public void setVersions(Cluster cluster) {
+    // set the repository version for the component this command is for -
+    // always use the current desired version
+    String serviceName = executionCommand.getServiceName();
+    try {
+      RepositoryVersionEntity repositoryVersion = null;
+      if (!StringUtils.isEmpty(serviceName)) {
+        Service service = cluster.getService(serviceName);
+        if (null != service) {
+          repositoryVersion = service.getDesiredRepositoryVersion();
 
           String componentName = executionCommand.getComponentName();
           if (!StringUtils.isEmpty(componentName)) {
-            ServiceComponent serviceComponent = service.getServiceComponent(
-                executionCommand.getComponentName());
-
+            ServiceComponent serviceComponent = service.getServiceComponent(componentName);
             if (null != serviceComponent) {
               repositoryVersion = serviceComponent.getDesiredRepositoryVersion();
             }
           }
         }
+      }
 
-        Map<String, String> commandParams = executionCommand.getCommandParams();
+      Map<String, String> commandParams = executionCommand.getCommandParams();
 
-        if (null != repositoryVersion) {
-          // only set the version if it's not set and this is NOT an install
-          // command
-          if (!commandParams.containsKey(KeyNames.VERSION)
-              && executionCommand.getRoleCommand() != RoleCommand.INSTALL) {
-            commandParams.put(KeyNames.VERSION, repositoryVersion.getVersion());
-          }
+      if (null != repositoryVersion) {
+        // only set the version if it's not set and this is NOT an install
+        // command
+        if (!commandParams.containsKey(VERSION)
+          && executionCommand.getRoleCommand() != RoleCommand.INSTALL) {
+          commandParams.put(VERSION, repositoryVersion.getVersion());
+        }
 
-          StackId stackId = repositoryVersion.getStackId();
-          StackInfo stackInfo = ambariMetaInfo.getStack(stackId.getStackName(),
-              stackId.getStackVersion());
+        StackId stackId = repositoryVersion.getStackId();
+        StackInfo stackInfo = ambariMetaInfo.getStack(stackId.getStackName(),
+          stackId.getStackVersion());
 
-          if (!commandParams.containsKey(HOOKS_FOLDER)) {
-            commandParams.put(HOOKS_FOLDER, stackInfo.getStackHooksFolder());
-          }
+        if (!commandParams.containsKey(HOOKS_FOLDER)) {
+          commandParams.put(HOOKS_FOLDER, stackInfo.getStackHooksFolder());
+        }
 
-          if (!commandParams.containsKey(SERVICE_PACKAGE_FOLDER)) {
-            if (!StringUtils.isEmpty(serviceName)) {
-              ServiceInfo serviceInfo = ambariMetaInfo.getService(stackId.getStackName(),
-                  stackId.getStackVersion(), serviceName);
+        if (!commandParams.containsKey(SERVICE_PACKAGE_FOLDER)) {
+          if (!StringUtils.isEmpty(serviceName)) {
+            ServiceInfo serviceInfo = ambariMetaInfo.getService(stackId.getStackName(),
+              stackId.getStackVersion(), serviceName);
 
-              commandParams.put(SERVICE_PACKAGE_FOLDER, serviceInfo.getServicePackageFolder());
-            }
+            commandParams.put(SERVICE_PACKAGE_FOLDER, serviceInfo.getServicePackageFolder());
           }
         }
-      } catch (ServiceNotFoundException serviceNotFoundException) {
-        // it's possible that there are commands specified for a service where
-        // the service doesn't exist yet
-        LOG.warn(
-            "The service {} is not installed in the cluster. No repository version will be sent for this command.",
-            executionCommand.getServiceName());
       }
 
       // set the desired versions of versionable components.  This is safe even during an upgrade because
       // we are "loading-late": components that have not yet upgraded in an EU will have the correct versions.
       executionCommand.setComponentVersions(cluster);
-
-      // provide some basic information about a cluster upgrade if there is one
-      // in progress
-      UpgradeEntity upgrade = cluster.getUpgradeInProgress();
-      if (null != upgrade) {
-        UpgradeContext upgradeContext = upgradeContextFactory.create(cluster, upgrade);
-        UpgradeSummary upgradeSummary = upgradeContext.getUpgradeSummary();
-        executionCommand.setUpgradeSummary(upgradeSummary);
-      }
-    } catch (ClusterNotFoundException cnfe) {
-      // it's possible that there are commands without clusters; in such cases,
-      // just return the de-serialized command and don't try to read configs
+    } catch (ServiceNotFoundException serviceNotFoundException) {
+      // it's possible that there are commands specified for a service where
+      // the service doesn't exist yet
       LOG.warn(
-          "Unable to lookup the cluster by ID; assuming that there is no cluster and therefore no configs for this execution command: {}",
-          cnfe.getMessage());
-
-      return executionCommand;
+        "The service {} is not installed in the cluster. No repository version will be sent for this command.",
+        serviceName);
     } catch (AmbariException e) {
       throw new RuntimeException(e);
     }
-
-    return executionCommand;
   }
 
   /**

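The extracted setVersions() keeps the original resolution order; a condensed sketch of just that precedence (helper name assumed, getters taken from the hunk above):

  static RepositoryVersionEntity resolveRepositoryVersion(Service service, ServiceComponent component) {
    // the service-level desired version is the default...
    RepositoryVersionEntity version = service.getDesiredRepositoryVersion();
    // ...but when the component exists, its desired version wins
    if (component != null) {
      version = component.getDesiredRepositoryVersion();
    }
    return version;
  }
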
http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/main/java/org/apache/ambari/server/agent/CommandRepository.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/agent/CommandRepository.java b/ambari-server/src/main/java/org/apache/ambari/server/agent/CommandRepository.java
index 858a55f..301f475 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/agent/CommandRepository.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/agent/CommandRepository.java
@@ -46,6 +46,13 @@ public class CommandRepository {
   private String m_stackName;
 
   /**
+   * {@code true} if Ambari believes that this repository has reported back its
+   * version after distribution.
+   */
+  @SerializedName("resolved")
+  private boolean m_resolved;
+
+  /**
    * @param version the repo version
    */
   public void setRepositoryVersion(String version) {
@@ -119,6 +126,27 @@ public class CommandRepository {
   }
 
   /**
+   * Gets whether this repository has been marked as having its version
+   * resolved.
+   *
+   * @return {@code true} if this repository has been confirmed to have the
+   *         right version.
+   */
+  public boolean isResolved() {
+    return m_resolved;
+  }
+
+  /**
+   * Sets whether this repository has had its version resolved.
+   *
+   * @param resolved
+   *          {@code true} to mark this repository as being resolved.
+   */
+  public void setResolved(boolean resolved) {
+    m_resolved = resolved;
+  }
+
+  /**
    * Minimal information required to generate repo files on the agent.  These are copies
    * of the repository objects from repo versions that can be changed for URL overrides, etc.
    */
@@ -133,12 +161,16 @@ public class CommandRepository {
     @SerializedName("ambariManaged")
     private boolean m_ambariManaged = true;
 
-    /**
-     * The name should not change.  Ubuntu requires that it match exactly as the repo was built.
-     */
+
     @SerializedName("repoName")
     private final String m_repoName;
 
+    @SerializedName("distribution")
+    private final String m_distribution;
+
+    @SerializedName("components")
+    private final String m_components;
+
     @SerializedName("mirrorsList")
     private String m_mirrorsList;
 
@@ -149,6 +181,8 @@ public class CommandRepository {
       m_osType = info.getOsType();
       m_repoId = info.getRepoId();
       m_repoName = info.getRepoName();
+      m_distribution = info.getDistribution();
+      m_components = info.getComponents();
       m_mirrorsList = info.getMirrorsList();
     }
 
@@ -156,6 +190,8 @@ public class CommandRepository {
       m_baseUrl = entity.getBaseUrl();
       m_repoId = entity.getRepositoryId();
       m_repoName = entity.getName();
+      m_distribution = entity.getDistribution();
+      m_components = entity.getComponents();
       m_mirrorsList = entity.getMirrorsList();
       m_osType = osType;
     }
@@ -176,6 +212,13 @@ public class CommandRepository {
       return m_repoName;
     }
 
+    public String getDistribution() {
+      return m_distribution;
+    }
+
+    public String getComponents() {
+      return m_components;
+    }
 
     public String getBaseUrl() {
       return m_baseUrl;
@@ -193,6 +236,8 @@ public class CommandRepository {
       return new ToStringBuilder(null)
           .append("os", m_osType)
           .append("name", m_repoName)
+          .append("distribution", m_distribution)
+          .append("components", m_components)
           .append("id", m_repoId)
           .append("baseUrl", m_baseUrl)
           .toString();
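
The new distribution/components fields line up with what a Debian-style repository definition needs; a hedged sketch of how an agent-side writer could use them (the method and the sources.list layout shown are illustrative, not the agent's actual code):

  static String toAptSourceLine(Repository repo) {
    // Debian/Ubuntu repo entries have the form "deb <baseUrl> <distribution> <components>";
    // yum-style repos don't use these two fields, so they may be null there
    return String.join(" ", "deb", repo.getBaseUrl(),
        repo.getDistribution(), repo.getComponents());
  }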

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/main/java/org/apache/ambari/server/api/resources/ClusterResourceDefinition.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/api/resources/ClusterResourceDefinition.java b/ambari-server/src/main/java/org/apache/ambari/server/api/resources/ClusterResourceDefinition.java
index 8933dd3..9d0c169 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/api/resources/ClusterResourceDefinition.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/api/resources/ClusterResourceDefinition.java
@@ -87,6 +87,7 @@ public class ClusterResourceDefinition extends BaseResourceDefinition {
     directives.add(KerberosHelper.DIRECTIVE_FORCE_TOGGLE_KERBEROS);
     directives.add(KerberosHelper.DIRECTIVE_HOSTS);
     directives.add(KerberosHelper.DIRECTIVE_COMPONENTS);
+    directives.add(KerberosHelper.DIRECTIVE_IGNORE_CONFIGS);
     return directives;
   }
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/main/java/org/apache/ambari/server/api/services/AmbariMetaInfo.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/api/services/AmbariMetaInfo.java b/ambari-server/src/main/java/org/apache/ambari/server/api/services/AmbariMetaInfo.java
index 1787b49..2a794de 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/api/services/AmbariMetaInfo.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/api/services/AmbariMetaInfo.java
@@ -1363,7 +1363,7 @@ public class AmbariMetaInfo {
   /**
    * Ensures that the map of version definition files is populated
    */
-  private void ensureVersionDefinitions() {
+  private synchronized void ensureVersionDefinitions() {
     if (null != versionDefinitions) {
       return;
     }

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/main/java/org/apache/ambari/server/checks/CheckDescription.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/checks/CheckDescription.java b/ambari-server/src/main/java/org/apache/ambari/server/checks/CheckDescription.java
index 4fd37dc..314ab83 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/checks/CheckDescription.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/checks/CheckDescription.java
@@ -341,6 +341,18 @@ public class CheckDescription {
         .put(AbstractCheckDescriptor.DEFAULT, "The following components do not exist in the target repository's stack. They must be removed from the cluster before upgrading.")
           .build());
 
+  public static CheckDescription DRUID_HA_WARNING = new CheckDescription(
+      "DRUID_HA",
+      PrereqCheckType.SERVICE,
+      "Druid Downtime During Upgrade",
+      new ImmutableMap.Builder<String, String>()
+          .put(
+              AbstractCheckDescriptor.DEFAULT,
+              "High Availability is not enabled for Druid. Druid Service may have some downtime during upgrade. Deploy multiple instances of %s in the Cluster to avoid any downtime."
+          )
+          .build()
+  );
+
   private String m_name;
   private PrereqCheckType m_type;
   private String m_description;
@@ -380,4 +392,4 @@ public class CheckDescription {
   public String getFail(String key) {
     return m_fails.containsKey(key) ? m_fails.get(key) : "";
   }
-}
\ No newline at end of file
+}
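
A short sketch of how the %s in DRUID_HA_WARNING gets filled; the joined component list is exactly what DruidHighAvailabilityCheckTest asserts on, while nonHaComponents and the surrounding check instance are assumed context:

  String reason = String.format(
      CheckDescription.DRUID_HA_WARNING.getFail(AbstractCheckDescriptor.DEFAULT),
      StringUtils.join(nonHaComponents, ", "));
  prerequisiteCheck.setFailReason(reason);
  prerequisiteCheck.setStatus(PrereqCheckStatus.WARNING);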

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/main/java/org/apache/ambari/server/checks/DatabaseConsistencyCheckHelper.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/checks/DatabaseConsistencyCheckHelper.java b/ambari-server/src/main/java/org/apache/ambari/server/checks/DatabaseConsistencyCheckHelper.java
index 054c470..34888f2 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/checks/DatabaseConsistencyCheckHelper.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/checks/DatabaseConsistencyCheckHelper.java
@@ -40,6 +40,7 @@ import java.util.regex.Pattern;
 import javax.annotation.Nullable;
 import javax.inject.Provider;
 import javax.persistence.EntityManager;
+import javax.persistence.Query;
 import javax.persistence.TypedQuery;
 
 import org.apache.ambari.server.AmbariException;
@@ -59,6 +60,8 @@ import org.apache.ambari.server.orm.entities.HostComponentStateEntity;
 import org.apache.ambari.server.orm.entities.MetainfoEntity;
 import org.apache.ambari.server.orm.entities.ServiceComponentDesiredStateEntity;
 import org.apache.ambari.server.state.ClientConfigFileDefinition;
+import org.apache.ambari.server.state.Cluster;
+import org.apache.ambari.server.state.Clusters;
 import org.apache.ambari.server.state.ComponentInfo;
 import org.apache.ambari.server.state.ServiceInfo;
 import org.apache.ambari.server.state.State;
@@ -92,6 +95,10 @@ public class DatabaseConsistencyCheckHelper {
   private static StageDAO stageDAO;
 
   private static DatabaseConsistencyCheckResult checkResult = DatabaseConsistencyCheckResult.DB_CHECK_SUCCESS;
+  public static final String GET_CONFIGS_SELECTED_MORE_THAN_ONCE_QUERY = "select c.cluster_name, cc.type_name from clusterconfig cc " +
+      "join clusters c on cc.cluster_id=c.cluster_id " +
+      "group by c.cluster_name, cc.type_name " +
+      "having sum(cc.selected) > 1";
 
   /**
    * @return The result of the DB cheks run so far.
@@ -174,6 +181,7 @@ public class DatabaseConsistencyCheckHelper {
       if (fixIssues) {
         fixHostComponentStatesCountEqualsHostComponentsDesiredStates();
         fixClusterConfigsNotMappedToAnyService();
+        fixConfigsSelectedMoreThanOnce();
       }
       checkSchemaName();
       checkMySQLEngine();
@@ -317,7 +325,7 @@ public class DatabaseConsistencyCheckHelper {
             warning("Unable to get size for table {}!", tableName);
           }
         } catch (SQLException ex) {
-          error(String.format("Failed to get %s row count: ", tableName), e);
+          warning(String.format("Failed to get %s row count: ", tableName), ex);
         }
       } finally {
         if (rs != null) {
@@ -376,7 +384,7 @@ public class DatabaseConsistencyCheckHelper {
       }
 
     } catch (SQLException e) {
-      error("Exception occurred during check for config selected more than once procedure: ", e);
+      warning("Exception occurred during check for config selected more than once procedure: ", e);
     } finally {
       if (rs != null) {
         try {
@@ -420,12 +428,12 @@ public class DatabaseConsistencyCheckHelper {
         }
 
         if (!hostsWithoutStatus.isEmpty()) {
-          error("You have host(s) without state (in hoststate table): " + StringUtils.join(hostsWithoutStatus, ","));
+          warning("You have host(s) without state (in hoststate table): " + StringUtils.join(hostsWithoutStatus, ","));
         }
       }
 
     } catch (SQLException e) {
-      error("Exception occurred during check for host without state procedure: ", e);
+      warning("Exception occurred during check for host without state procedure: ", e);
     } finally {
       if (rs != null) {
         try {
@@ -458,7 +466,7 @@ public class DatabaseConsistencyCheckHelper {
       }
 
     } catch (SQLException e) {
-      error("Exception occurred during topology request tables check: ", e);
+      warning("Exception occurred during topology request tables check: ", e);
     } finally {
       if (rs != null) {
         try {
@@ -523,7 +531,7 @@ public class DatabaseConsistencyCheckHelper {
       }
 
       if (hostComponentStateCount != hostComponentDesiredStateCount || hostComponentStateCount != mergedCount) {
-        error("Your host component states (hostcomponentstate table) count not equals host component desired states (hostcomponentdesiredstate table) count!");
+        warning("Your host component states (hostcomponentstate table) count does not equal host component desired states (hostcomponentdesiredstate table) count!");
       }
 
 
@@ -535,11 +543,11 @@ public class DatabaseConsistencyCheckHelper {
       }
 
       for (Map.Entry<String, String> component : hostComponentStateDuplicates.entrySet()) {
-        error("Component {} on host with id {}, has more than one host component state (hostcomponentstate table)!", component.getKey(), component.getValue());
+        warning("Component {} on host with id {}, has more than one host component state (hostcomponentstate table)!", component.getKey(), component.getValue());
       }
 
     } catch (SQLException e) {
-      error("Exception occurred during check for same count of host component states and host component desired states: ", e);
+      warning("Exception occurred during check for same count of host component states and host component desired states: ", e);
     } finally {
       if (rs != null) {
         try {
@@ -774,11 +782,11 @@ public class DatabaseConsistencyCheckHelper {
           tablesInfo.add(rs.getString("TABLE_NAME"));
         }
         if (!tablesInfo.isEmpty()){
-          error("Found tables with engine type that is not InnoDB : {}", tablesInfo);
+          warning("Found tables with engine type that is not InnoDB : {}", tablesInfo);
         }
       }
     } catch (SQLException e) {
-      error("Exception occurred during checking MySQL engine to be innodb: ", e);
+      warning("Exception occurred during checking MySQL engine to be innodb: ", e);
     } finally {
       if (rs != null) {
         try {
@@ -791,12 +799,102 @@ public class DatabaseConsistencyCheckHelper {
   }
 
   /**
+   * Fixes inconsistencies found by {@code checkForConfigsSelectedMoreThanOnce},
+   * keeping only the config with the latest selectedTimestamp selected.
+   */
+  @Transactional
+  static void fixConfigsSelectedMoreThanOnce() {
+    LOG.info("Fix configs selected more than once");
+    ClusterDAO clusterDAO = injector.getInstance(ClusterDAO.class);
+
+    Clusters clusters = injector.getInstance(Clusters.class);
+    Map<String, Cluster> clusterMap = clusters.getClusters();
+
+
+    Multimap<String, String> clusterConfigTypeMap = HashMultimap.create();
+    ResultSet rs = null;
+    Statement statement = null;
+
+    ensureConnection();
+
+    try {
+      statement = connection.createStatement(ResultSet.TYPE_SCROLL_SENSITIVE, ResultSet.CONCUR_UPDATABLE);
+      rs = statement.executeQuery(GET_CONFIGS_SELECTED_MORE_THAN_ONCE_QUERY);
+      if (rs != null) {
+        while (rs.next()) {
+          clusterConfigTypeMap.put(rs.getString("cluster_name"), rs.getString("type_name"));
+        }
+      }
+
+    } catch (SQLException e) {
+      warning("Exception occurred during check for config selected more than once procedure: ", e);
+    } finally {
+      if (rs != null) {
+        try {
+          rs.close();
+        } catch (SQLException e) {
+          LOG.error("Exception occurred during result set closing procedure: ", e);
+        }
+      }
+
+      if (statement != null) {
+        try {
+          statement.close();
+        } catch (SQLException e) {
+          LOG.error("Exception occurred during statement closing procedure: ", e);
+        }
+      }
+    }
+
+    for (String clusterName : clusterConfigTypeMap.keySet()) {
+      Cluster cluster = null;
+      try {
+        cluster = clusters.getCluster(clusterName);
+
+        Collection<String> typesWithMultipleSelectedConfigs = clusterConfigTypeMap.get(clusterName);
+
+        for (String type: typesWithMultipleSelectedConfigs) {
+          List<ClusterConfigEntity> enabledConfigsByType = getEnabledConfigsByType(cluster.getClusterId(), type);
+          ClusterConfigEntity latestConfig = enabledConfigsByType.get(0);
+          for (ClusterConfigEntity entity : enabledConfigsByType){
+            entity.setSelected(false);
+            if (latestConfig.getSelectedTimestamp() < entity.getSelectedTimestamp()){
+              latestConfig = entity;
+            }
+            clusterDAO.merge(entity, true);
+          }
+          latestConfig.setSelected(true);
+          clusterDAO.merge(latestConfig, true);
+        }
+      } catch (AmbariException e) {
+        warning("Exception occurred during fix for config selected more than once procedure: ", e);
+      }
+    }
+  }
+
+  /**
+   * Find ClusterConfigs of the given type with selected = 1
+   * @return the enabled ClusterConfigs of the given type for the given cluster
+   */
+  private static List<ClusterConfigEntity> getEnabledConfigsByType(long clusterId, String type) {
+
+    Provider<EntityManager> entityManagerProvider = injector.getProvider(EntityManager.class);
+    EntityManager entityManager = entityManagerProvider.get();
+
+    Query query = entityManager.createNamedQuery("ClusterConfigEntity.findEnabledConfigByType", ClusterConfigEntity.class);
+    query.setParameter("clusterId", clusterId);
+    query.setParameter("type", type);
+
+    return (List<ClusterConfigEntity>) query.getResultList();
+  }
+
+  /**
   * This method checks several potential problems for services:
   * 1) Check if we have services in the cluster which don't have a service config id (not available in the serviceconfig table).
   * 2) Check if a service has no configs mapped to its service config id.
   * 3) Check if a service has all required configs mapped to it.
   * 4) Check if a service has a config which is not selected (has no actual config version).
-  * If any issue was discovered, we are showing error message for user.
+  * If any issue is discovered, a warning message is shown to the user.
   * */
   static void checkServiceConfigs()  {
     LOG.info("Checking services and their configs");
@@ -875,7 +973,7 @@ public class DatabaseConsistencyCheckHelper {
         for (String clName : clusterServiceVersionMap.keySet()) {
           Multimap<String, String> serviceVersion = clusterServiceVersionMap.get(clName);
           for (String servName : serviceVersion.keySet()) {
-            error("In cluster {}, service config mapping is unavailable (in table serviceconfigmapping) for service {} with version(s) {}! ", clName, servName, StringUtils.join(serviceVersion.get(servName), ","));
+            warning("In cluster {}, service config mapping is unavailable (in table serviceconfigmapping) for service {} with version(s) {}! ", clName, servName, StringUtils.join(serviceVersion.get(servName), ","));
           }
         }
 
@@ -986,7 +1084,7 @@ public class DatabaseConsistencyCheckHelper {
                   }
 
                   if (!serviceConfigsFromStack.isEmpty()) {
-                    error("Required config(s): {} is(are) not available for service {} with service config version {} in cluster {}",
+                    warning("Required config(s): {} is(are) not available for service {} with service config version {} in cluster {}",
                             StringUtils.join(serviceConfigsFromStack, ","), serviceName, Integer.toString(serviceVersion), clusterName);
                   }
                 }
@@ -1024,11 +1122,11 @@ public class DatabaseConsistencyCheckHelper {
       for (String clusterName : clusterServiceConfigType.keySet()) {
         Multimap<String, String> serviceConfig = clusterServiceConfigType.get(clusterName);
         for (String serviceName : serviceConfig.keySet()) {
-          error("You have non selected configs: {} for service {} from cluster {}!", StringUtils.join(serviceConfig.get(serviceName), ","), serviceName, clusterName);
+          warning("You have non selected configs: {} for service {} from cluster {}!", StringUtils.join(serviceConfig.get(serviceName), ","), serviceName, clusterName);
         }
       }
     } catch (SQLException | AmbariException e) {
-      error("Exception occurred during complex service check procedure: ", e);
+      warning("Exception occurred during complex service check procedure: ", e);
     } finally {
       if (rs != null) {
         try {

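As a side note on the de-duplication fix above: fixConfigsSelectedMoreThanOnce() boils down to deselecting every enabled config of a type and re-selecting only the one with the newest selectedTimestamp. A minimal self-contained sketch of that idea follows; the Config class here is a hypothetical stand-in for the ClusterConfigEntity JPA entity, and the DAO merge calls are omitted.

import java.util.Arrays;
import java.util.Comparator;
import java.util.List;

public class FixSelectionSketch {

  // Hypothetical stand-in for ClusterConfigEntity, for illustration only.
  static class Config {
    final String tag;
    final long selectedTimestamp;
    boolean selected = true;
    Config(String tag, long selectedTimestamp) {
      this.tag = tag;
      this.selectedTimestamp = selectedTimestamp;
    }
  }

  // Deselect every enabled config, then re-select only the newest one,
  // mirroring the shape of fixConfigsSelectedMoreThanOnce() above.
  static void keepOnlyLatestSelected(List<Config> enabledConfigs) {
    Config latest = enabledConfigs.stream()
        .max(Comparator.comparingLong(c -> c.selectedTimestamp))
        .orElseThrow(IllegalStateException::new);
    enabledConfigs.forEach(c -> c.selected = false);
    latest.selected = true;
  }

  public static void main(String[] args) {
    List<Config> configs = Arrays.asList(new Config("version1", 100L), new Config("version2", 250L));
    keepOnlyLatestSelected(configs);
    configs.forEach(c -> System.out.println(c.tag + " selected=" + c.selected));
    // prints: version1 selected=false, then version2 selected=true
  }
}
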
http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/main/java/org/apache/ambari/server/checks/DruidHighAvailabilityCheck.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/checks/DruidHighAvailabilityCheck.java b/ambari-server/src/main/java/org/apache/ambari/server/checks/DruidHighAvailabilityCheck.java
new file mode 100644
index 0000000..88502ba
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/checks/DruidHighAvailabilityCheck.java
@@ -0,0 +1,121 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.server.checks;
+
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+
+import org.apache.ambari.server.AmbariException;
+import org.apache.ambari.server.ServiceComponentNotFoundException;
+import org.apache.ambari.server.controller.PrereqCheckRequest;
+import org.apache.ambari.server.state.Cluster;
+import org.apache.ambari.server.state.ServiceComponent;
+import org.apache.ambari.server.state.stack.PrereqCheckStatus;
+import org.apache.ambari.server.state.stack.PrerequisiteCheck;
+import org.apache.commons.lang.StringUtils;
+
+import com.google.common.collect.Lists;
+import com.google.common.collect.Sets;
+import com.google.inject.Singleton;
+
+/**
+ * Checks that Druid high availability is enabled, i.e. that each installed Druid component runs on more than one host.
+ */
+@Singleton
+@UpgradeCheck(group = UpgradeCheckGroup.MULTIPLE_COMPONENT_WARNING, order = 16.0f)
+public class DruidHighAvailabilityCheck extends AbstractCheckDescriptor
+{
+
+  public static final String DRUID_SERVICE_NAME = "DRUID";
+  public static final String[] DRUID_COMPONENT_NAMES = new String[]{
+      "DRUID_BROKER",
+      "DRUID_COORDINATOR",
+      "DRUID_HISTORICAL",
+      "DRUID_OVERLORD",
+      "DRUID_MIDDLEMANAGER",
+      "DRUID_ROUTER"
+  };
+
+  /**
+   * Constructor.
+   */
+  public DruidHighAvailabilityCheck()
+  {
+    super(CheckDescription.DRUID_HA_WARNING);
+  }
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  public Set<String> getApplicableServices()
+  {
+    return Sets.newHashSet(DRUID_SERVICE_NAME);
+  }
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  public List<CheckQualification> getQualifications()
+  {
+    return Arrays.asList(
+        new PriorCheckQualification(CheckDescription.DRUID_HA_WARNING));
+  }
+
+  @Override
+  public void perform(PrerequisiteCheck prerequisiteCheck, PrereqCheckRequest request) throws AmbariException
+  {
+    List<String> haNotEnabledComponents = Lists.newArrayList();
+    for (String component : DRUID_COMPONENT_NAMES) {
+      Set<String> hosts = getHostsForComponent(request, component);
+      if (hosts.size() == 1) {
+        // This component is installed on only 1 host, HA is not enabled for it.
+        haNotEnabledComponents.add(component);
+      }
+    }
+    if (!haNotEnabledComponents.isEmpty()) {
+      prerequisiteCheck.getFailedOn().add(DRUID_SERVICE_NAME);
+      prerequisiteCheck.setStatus(PrereqCheckStatus.WARNING);
+      String failReason = getFailReason(prerequisiteCheck, request);
+      prerequisiteCheck.setFailReason(String.format(failReason, StringUtils.join(haNotEnabledComponents.toArray(), ", ")));
+    }
+
+  }
+
+  private Set<String> getHostsForComponent(PrereqCheckRequest request, String componentName)
+      throws AmbariException
+  {
+    Set<String> hosts = new HashSet<>();
+    final String clusterName = request.getClusterName();
+    final Cluster cluster = clustersProvider.get().getCluster(clusterName);
+    try {
+      ServiceComponent serviceComponent = cluster.getService(DRUID_SERVICE_NAME).getServiceComponent(componentName);
+      if (serviceComponent != null) {
+        hosts = serviceComponent.getServiceComponentHosts().keySet();
+      }
+    }
+    catch (ServiceComponentNotFoundException err) {
+      // This exception can be ignored if the component doesn't exist, because this is a best-effort attempt at finding it.
+    }
+
+    return hosts;
+  }
+}

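The warning logic of the new check can be summarized in isolation: a component resolved to exactly one host is flagged, while a component with no hosts (not installed) passes silently. A rough standalone sketch under those assumptions; the host mapping below is fabricated, standing in for ServiceComponent#getServiceComponentHosts().keySet().

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;

public class DruidHaSketch {
  public static void main(String[] args) {
    // Fabricated component-to-hosts mapping, for illustration only.
    Map<String, Set<String>> hostsByComponent = new LinkedHashMap<>();
    hostsByComponent.put("DRUID_BROKER", new HashSet<>(Arrays.asList("host1", "host2")));
    hostsByComponent.put("DRUID_OVERLORD", Collections.singleton("host1")); // single host
    hostsByComponent.put("DRUID_ROUTER", Collections.emptySet());           // not installed

    List<String> haNotEnabledComponents = new ArrayList<>();
    for (Map.Entry<String, Set<String>> entry : hostsByComponent.entrySet()) {
      if (entry.getValue().size() == 1) { // exactly one host => no HA for this component
        haNotEnabledComponents.add(entry.getKey());
      }
    }
    // A real run would set PrereqCheckStatus.WARNING; here we just print.
    System.out.println("HA not enabled for: " + haNotEnabledComponents); // [DRUID_OVERLORD]
  }
}
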
http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/main/java/org/apache/ambari/server/checks/ServiceCheckValidityCheck.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/checks/ServiceCheckValidityCheck.java b/ambari-server/src/main/java/org/apache/ambari/server/checks/ServiceCheckValidityCheck.java
index ad68a2c..a4c2430 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/checks/ServiceCheckValidityCheck.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/checks/ServiceCheckValidityCheck.java
@@ -19,29 +19,19 @@ package org.apache.ambari.server.checks;
 
 import java.text.SimpleDateFormat;
 import java.util.Collection;
-import java.util.Collections;
 import java.util.Date;
 import java.util.HashMap;
 import java.util.LinkedHashSet;
 import java.util.List;
 import java.util.Map;
+import java.util.Map.Entry;
 
 import org.apache.ambari.server.AmbariException;
-import org.apache.ambari.server.Role;
-import org.apache.ambari.server.RoleCommand;
 import org.apache.ambari.server.controller.PrereqCheckRequest;
-import org.apache.ambari.server.controller.internal.PageRequestImpl;
-import org.apache.ambari.server.controller.internal.RequestImpl;
-import org.apache.ambari.server.controller.internal.SortRequestImpl;
-import org.apache.ambari.server.controller.internal.TaskResourceProvider;
-import org.apache.ambari.server.controller.spi.PageRequest;
-import org.apache.ambari.server.controller.spi.Predicate;
-import org.apache.ambari.server.controller.spi.SortRequest;
-import org.apache.ambari.server.controller.spi.SortRequestProperty;
-import org.apache.ambari.server.controller.utilities.PredicateBuilder;
+import org.apache.ambari.server.metadata.ActionMetadata;
 import org.apache.ambari.server.orm.dao.HostRoleCommandDAO;
+import org.apache.ambari.server.orm.dao.HostRoleCommandDAO.LastServiceCheckDTO;
 import org.apache.ambari.server.orm.dao.ServiceConfigDAO;
-import org.apache.ambari.server.orm.entities.HostRoleCommandEntity;
 import org.apache.ambari.server.orm.entities.ServiceConfigEntity;
 import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.MaintenanceState;
@@ -73,17 +63,6 @@ public class ServiceCheckValidityCheck extends AbstractCheckDescriptor {
   private static final Logger LOG = LoggerFactory.getLogger(ServiceCheckValidityCheck.class);
 
   private static final SimpleDateFormat DATE_FORMAT = new SimpleDateFormat("MM-dd-yyyy hh:mm:ss");
-  private static List<SortRequestProperty> sortRequestProperties =
-      Collections.singletonList(new SortRequestProperty(TaskResourceProvider.TASK_START_TIME_PROPERTY_ID, SortRequest.Order.DESC));
-  private static SortRequest sortRequest = new SortRequestImpl(sortRequestProperties);
-  private static final PageRequestImpl PAGE_REQUEST = new PageRequestImpl(PageRequest.StartingPoint.End, 1000, 0, null, null);
-  private static final RequestImpl REQUEST = new RequestImpl(null, null, null, null, sortRequest, PAGE_REQUEST);
-  private static final Predicate PREDICATE = new PredicateBuilder()
-    .property(TaskResourceProvider.TASK_COMMAND_PROPERTY_ID).equals(RoleCommand.SERVICE_CHECK.name())
-    .and().property(TaskResourceProvider.TASK_START_TIME_PROPERTY_ID).greaterThan(-1)
-    .toPredicate();
-
-
 
   @Inject
   Provider<ServiceConfigDAO> serviceConfigDAOProvider;
@@ -91,6 +70,8 @@ public class ServiceCheckValidityCheck extends AbstractCheckDescriptor {
   @Inject
   Provider<HostRoleCommandDAO> hostRoleCommandDAOProvider;
 
+  @Inject
+  Provider<ActionMetadata> actionMetadataProvider;
 
   /**
    * Constructor.
@@ -113,8 +94,8 @@ public class ServiceCheckValidityCheck extends AbstractCheckDescriptor {
     final Cluster cluster = clustersProvider.get().getCluster(clusterName);
     long clusterId = cluster.getClusterId();
 
+    // build a mapping of the last config changes by service
     Map<String, Long> lastServiceConfigUpdates = new HashMap<>();
-
     for (Service service : cluster.getServices().values()) {
       if (service.getMaintenanceState() != MaintenanceState.OFF || !hasAtLeastOneComponentVersionAdvertised(service)) {
         continue;
@@ -130,43 +111,34 @@ public class ServiceCheckValidityCheck extends AbstractCheckDescriptor {
       }
     }
 
-    List<HostRoleCommandEntity> commands = hostRoleCommandDAO.findAll(REQUEST, PREDICATE);
-
-    // !!! build a map of Role to latest-config-check in case it was rerun multiple times, we want the latest
-    Map<Role, HostRoleCommandEntity> latestTimestamps = new HashMap<>();
-    for (HostRoleCommandEntity command : commands) {
-      Role role = command.getRole();
-
-      // Because results are already sorted by start_time desc, first occurrence is guaranteed to have max(start_time).
-      if (!latestTimestamps.containsKey(role)) {
-        latestTimestamps.put(role, command);
-      }
+    // get the latest service checks, grouped by role
+    List<LastServiceCheckDTO> lastServiceChecks = hostRoleCommandDAO.getLatestServiceChecksByRole(clusterId);
+    Map<String, Long> lastServiceChecksByRole = new HashMap<>();
+    for( LastServiceCheckDTO lastServiceCheck : lastServiceChecks ) {
+      lastServiceChecksByRole.put(lastServiceCheck.role, lastServiceCheck.endTime);
     }
 
     LinkedHashSet<String> failedServiceNames = new LinkedHashSet<>();
-    for (Map.Entry<String, Long> serviceEntry : lastServiceConfigUpdates.entrySet()) {
-      String serviceName = serviceEntry.getKey();
-      Long configTimestamp = serviceEntry.getValue();
-
-      boolean serviceCheckWasExecuted = false;
-      for (HostRoleCommandEntity command : latestTimestamps.values()) {
-        if (null !=  command.getCommandDetail() && command.getCommandDetail().contains(serviceName)) {
-          serviceCheckWasExecuted = true;
-          Long serviceCheckTimestamp = command.getStartTime();
-
-          if (serviceCheckTimestamp < configTimestamp) {
-            failedServiceNames.add(serviceName);
-            LOG.info("Service {} latest config change is {}, latest service check executed at {}",
-                serviceName,
-                DATE_FORMAT.format(new Date(configTimestamp)),
-                DATE_FORMAT.format(new Date(serviceCheckTimestamp)));
-          }
-        }
+
+    // for every service, see if a service check was executed and whether it ran after the last config change
+    for( Entry<String, Long> entry : lastServiceConfigUpdates.entrySet() ) {
+      String serviceName = entry.getKey();
+      long configCreationTime = entry.getValue();
+      String role = actionMetadataProvider.get().getServiceCheckAction(serviceName);
+
+      if (!lastServiceChecksByRole.containsKey(role)) {
+        LOG.info("There was no service check found for service {} matching role {}", serviceName, role);
+        failedServiceNames.add(serviceName);
+        continue;
       }
 
-      if (!serviceCheckWasExecuted) {
+      long lastServiceCheckTime = lastServiceChecksByRole.get(role);
+      if (lastServiceCheckTime < configCreationTime) {
         failedServiceNames.add(serviceName);
-        LOG.info("Service {} service check has never been executed", serviceName);
+        LOG.info(
+            "The {} service (role {}) had its configurations updated on {}, but the last service check was {}",
+            serviceName, role, DATE_FORMAT.format(new Date(configCreationTime)),
+            DATE_FORMAT.format(new Date(lastServiceCheckTime)));
       }
     }
 

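The reworked comparison above replaces the paged task scan with a single DAO query and a role-keyed map. Its decision rule, in a self-contained sketch: the timestamps are fabricated, and the role-name derivation below is a simplified stand-in for ActionMetadata#getServiceCheckAction.

import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.Map;
import java.util.Set;

public class ServiceCheckValiditySketch {
  public static void main(String[] args) {
    // Fabricated epoch timestamps of the last config change per service.
    Map<String, Long> lastServiceConfigUpdates = new LinkedHashMap<>();
    lastServiceConfigUpdates.put("HDFS", 2_000L);
    lastServiceConfigUpdates.put("YARN", 1_000L);

    // Fabricated role -> end time of the most recent service check.
    Map<String, Long> lastServiceChecksByRole = new HashMap<>();
    lastServiceChecksByRole.put("HDFS_SERVICE_CHECK", 1_500L); // older than the config change
    lastServiceChecksByRole.put("YARN_SERVICE_CHECK", 1_500L); // newer than the config change

    Set<String> failedServiceNames = new LinkedHashSet<>();
    for (Map.Entry<String, Long> entry : lastServiceConfigUpdates.entrySet()) {
      // Simplified stand-in for ActionMetadata#getServiceCheckAction(serviceName).
      String role = entry.getKey() + "_SERVICE_CHECK";
      Long lastCheck = lastServiceChecksByRole.get(role);
      if (lastCheck == null || lastCheck < entry.getValue()) {
        failedServiceNames.add(entry.getKey()); // never checked, or checked before the config change
      }
    }
    System.out.println(failedServiceNames); // [HDFS]
  }
}
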
http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java
index d3e1488..5ffdf26 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java
@@ -1312,14 +1312,18 @@ public class AmbariCustomCommandExecutionHelper {
     if (actionName.equals(START_COMMAND_NAME) || actionName.equals(RESTART_COMMAND_NAME)) {
       Cluster cluster = clusters.getCluster(clusterName);
       StackId stackId = null;
-      try {
-        Service service = cluster.getService(serviceName);
-        stackId = service.getDesiredStackId();
-      } catch (AmbariException e) {
-        LOG.debug("Could not load service {}, skipping topology check", serviceName);
-        stackId = cluster.getDesiredStackVersion();
+      if (serviceName != null) {
+        try {
+          Service service = cluster.getService(serviceName);
+          stackId = service.getDesiredStackId();
+        } catch (AmbariException e) {
+          LOG.debug("Could not load service {}, skipping topology check", serviceName);
+        }
       }
 
+      if (stackId == null) {
+        stackId = cluster.getDesiredStackVersion();
+      }
 
       AmbariMetaInfo ambariMetaInfo = managementController.getAmbariMetaInfo();
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
index 817f340..9db5832 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
@@ -97,6 +97,7 @@ import org.apache.ambari.server.ServiceNotFoundException;
 import org.apache.ambari.server.StackAccessException;
 import org.apache.ambari.server.actionmanager.ActionManager;
 import org.apache.ambari.server.actionmanager.CommandExecutionType;
+import org.apache.ambari.server.actionmanager.ExecutionCommandWrapper;
 import org.apache.ambari.server.actionmanager.HostRoleCommand;
 import org.apache.ambari.server.actionmanager.RequestFactory;
 import org.apache.ambari.server.actionmanager.Stage;
@@ -2451,8 +2452,8 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle
         stackId.getStackVersion());
     Map<String, ServiceInfo> servicesMap = ambariMetaInfo.getServices(stackInfo.getName(), stackInfo.getVersion());
 
-    ExecutionCommand execCmd = stage.getExecutionCommandWrapper(scHost.getHostName(),
-      scHost.getServiceComponentName()).getExecutionCommand();
+    ExecutionCommandWrapper execCmdWrapper = stage.getExecutionCommandWrapper(hostname, componentName);
+    ExecutionCommand execCmd = execCmdWrapper.getExecutionCommand();
 
     execCmd.setConfigurations(configurations);
     execCmd.setConfigurationAttributes(configurationAttributes);
@@ -3055,15 +3056,22 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle
                 }
                 break;
               case INIT:
-                throw new AmbariException("Unsupported transition to INIT for"
-                    + " servicecomponenthost"
-                    + ", clusterName=" + cluster.getClusterName()
-                    + ", clusterId=" + cluster.getClusterId()
-                    + ", serviceName=" + scHost.getServiceName()
-                    + ", componentName=" + scHost.getServiceComponentName()
-                    + ", hostname=" + scHost.getHostName()
-                    + ", currentState=" + oldSchState
-                    + ", newDesiredState=" + newState);
+                if (oldSchState == State.INSTALLED ||
+                    oldSchState == State.INSTALL_FAILED ||
+                    oldSchState == State.INIT) {
+                  scHost.setState(State.INIT);
+                  continue;
+                } else  {
+                  throw new AmbariException("Unsupported transition to INIT for"
+                      + " servicecomponenthost"
+                      + ", clusterName=" + cluster.getClusterName()
+                      + ", clusterId=" + cluster.getClusterId()
+                      + ", serviceName=" + scHost.getServiceName()
+                      + ", componentName=" + scHost.getServiceComponentName()
+                      + ", hostname=" + scHost.getHostName()
+                      + ", currentState=" + oldSchState
+                      + ", newDesiredState=" + newState);
+                }
               default:
                 throw new AmbariException("Unsupported state change operation"
                     + ", newState=" + newState);
@@ -3716,6 +3724,7 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle
       for (ServiceComponentHost componentHost : entry.getValue()) {
         try {
           //actually delete the component
+          //TODO update metadata processing according to AMBARI-21587
           entry.getKey().deleteServiceComponentHosts(componentHost.getHostName(), deleteMetaData);
 
           //create cluster-master-service map to update all include/exclude files in one action
@@ -3817,7 +3826,6 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle
     LOG.debug("Refresh include/exclude files action will be executed for " + serviceMasterMap);
     HashMap<String, String> requestProperties = new HashMap<>();
     requestProperties.put("context", "Update Include/Exclude Files for " + serviceMasterMap.keySet().toString());
-    requestProperties.put("exclusive", "true");
     HashMap<String, String> params = new HashMap<>();
     params.put(AmbariCustomCommandExecutionHelper.UPDATE_FILES_ONLY, String.valueOf(isDecommission));
 
@@ -3838,7 +3846,7 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle
     //Create request for command
     ExecuteActionRequest actionRequest = new ExecuteActionRequest(
       clusterName, AmbariCustomCommandExecutionHelper.DECOMMISSION_COMMAND_NAME, null,
-      resourceFilters, null, params, true);
+      resourceFilters, null, params, false);
     //Send action
     createAction(actionRequest, requestProperties);
   }
@@ -4572,7 +4580,8 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle
         for (OperatingSystemEntity operatingSystem: repositoryVersion.getOperatingSystems()) {
           if (operatingSystem.getOsType().equals(osType)) {
             for (RepositoryEntity repository: operatingSystem.getRepositories()) {
-              final RepositoryResponse response = new RepositoryResponse(repository.getBaseUrl(), osType, repository.getRepositoryId(), repository.getName(), "", "");
+              final RepositoryResponse response = new RepositoryResponse(repository.getBaseUrl(), osType, repository.getRepositoryId(),
+                      repository.getName(), repository.getDistribution(), repository.getComponents(), "", "");
               if (null != versionDefinitionId) {
                 response.setVersionDefinitionId(versionDefinitionId);
               } else {
@@ -4600,7 +4609,7 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle
 
         for (RepositoryXml.Repo repo : os.getRepos()) {
           RepositoryResponse resp = new RepositoryResponse(repo.getBaseUrl(), os.getFamily(),
-              repo.getRepoId(), repo.getRepoName(), repo.getMirrorsList(),
+              repo.getRepoId(), repo.getRepoName(), repo.getDistribution(), repo.getComponents(), repo.getMirrorsList(),
               repo.getBaseUrl());
 
           resp.setVersionDefinitionId(versionDefinitionId);

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/main/java/org/apache/ambari/server/controller/ConfigGroupRequest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/ConfigGroupRequest.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/ConfigGroupRequest.java
index cb20328..babdf10 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/ConfigGroupRequest.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/ConfigGroupRequest.java
@@ -27,18 +27,20 @@ public class ConfigGroupRequest {
   private String clusterName;
   private String groupName;
   private String tag;
+  private String serviceName;
   private String description;
   private String serviceConfigVersionNote;
   private Set<String> hosts;
   private Map<String, Config> configs;
 
   public ConfigGroupRequest(Long id, String clusterName, String groupName,
-                            String tag, String description, Set<String> hosts,
-                            Map<String, Config> configs) {
+                            String tag, String serviceName, String description,
+                            Set<String> hosts, Map<String, Config> configs) {
     this.id = id;
     this.clusterName = clusterName;
     this.groupName = groupName;
     this.tag = tag;
+    this.serviceName = serviceName;
     this.description = description;
     this.hosts = hosts;
     this.configs = configs;
@@ -68,6 +70,14 @@ public class ConfigGroupRequest {
     this.tag = tag;
   }
 
+  public String getServiceName() {
+    return serviceName;
+  }
+
+  public void setServiceName(String serviceName) {
+    this.serviceName = serviceName;
+  }
+
   public String getDescription() {
     return description;
   }

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/main/java/org/apache/ambari/server/controller/ControllerModule.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/ControllerModule.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/ControllerModule.java
index 5981287..6d72855 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/ControllerModule.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/ControllerModule.java
@@ -62,6 +62,7 @@ import org.apache.ambari.server.cleanup.ClasspathScannerUtils;
 import org.apache.ambari.server.configuration.Configuration;
 import org.apache.ambari.server.configuration.Configuration.ConnectionPoolType;
 import org.apache.ambari.server.configuration.Configuration.DatabaseType;
+import org.apache.ambari.server.controller.internal.ClusterStackVersionResourceProvider;
 import org.apache.ambari.server.controller.internal.ComponentResourceProvider;
 import org.apache.ambari.server.controller.internal.CredentialResourceProvider;
 import org.apache.ambari.server.controller.internal.HostComponentResourceProvider;
@@ -460,6 +461,7 @@ public class ControllerModule extends AbstractModule {
         .implement(ResourceProvider.class, Names.named("credential"), CredentialResourceProvider.class)
         .implement(ResourceProvider.class, Names.named("kerberosDescriptor"), KerberosDescriptorResourceProvider.class)
         .implement(ResourceProvider.class, Names.named("upgrade"), UpgradeResourceProvider.class)
+        .implement(ResourceProvider.class, Names.named("clusterStackVersion"), ClusterStackVersionResourceProvider.class)
         .build(ResourceProviderFactory.class));
 
     install(new FactoryModuleBuilder().implement(

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/main/java/org/apache/ambari/server/controller/DeleteIdentityHandler.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/DeleteIdentityHandler.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/DeleteIdentityHandler.java
index a7b9d80..29f8e2a 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/DeleteIdentityHandler.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/DeleteIdentityHandler.java
@@ -227,7 +227,8 @@ class DeleteIdentityHandler {
         calculateConfig(kerberosDescriptor, serviceNames()),
         new HashMap<>(),
         false,
-        new HashMap<>());
+        new HashMap<>(),
+        false);
       return createCommandReport(0, HostRoleStatus.COMPLETED, "{}", actionLog.getStdOut(), actionLog.getStdErr());
     }
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelper.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelper.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelper.java
index bb360b5..20c5708 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelper.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelper.java
@@ -59,6 +59,10 @@ public interface KerberosHelper {
    */
   String DIRECTIVE_COMPONENTS = "regenerate_components";
   /**
+   * directive used to indicate that configuration updates should be ignored when regenerating keytabs
+   */
+  String DIRECTIVE_IGNORE_CONFIGS = "ignore_config_updates";
+  /**
    * directive used to indicate that the enable Kerberos operation should proceed even if the
    * cluster's security type is not changing
    */
@@ -591,6 +595,7 @@ public interface KerberosHelper {
    *                                       values
   * @param configurations                 a Map of configurations to use as replacements for variables
    *                                       in identity fields
+   * @param ignoreHeadless                 boolean value specifying whether headless principals should be skipped
    * @return an integer indicating the number of identities added to the data file
    * @throws java.io.IOException if an error occurs while writing a record to the data file
    */
@@ -598,9 +603,8 @@ public interface KerberosHelper {
                     Collection<KerberosIdentityDescriptor> identities,
                     Collection<String> identityFilter, String hostname, String serviceName,
                     String componentName, Map<String, Map<String, String>> kerberosConfigurations,
-                    Map<String, Map<String, String>> configurations)
+                    Map<String, Map<String, String>> configurations, boolean ignoreHeadless)
       throws IOException;
-
   /**
    * Calculates the map of configurations relative to the cluster and host.
    * <p/>

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelperImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelperImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelperImpl.java
index 013a063..67b08fd 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelperImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelperImpl.java
@@ -265,10 +265,13 @@ public class KerberosHelperImpl implements KerberosHelper {
               Set<String> hostFilter = parseHostFilter(requestProperties);
               Map<String, Set<String>> serviceComponentFilter = parseComponentFilter(requestProperties);
 
+              boolean updateConfigurations = !requestProperties.containsKey(DIRECTIVE_IGNORE_CONFIGS)
+                  || !"true".equalsIgnoreCase(requestProperties.get(DIRECTIVE_IGNORE_CONFIGS));
+
               if ("true".equalsIgnoreCase(value) || "all".equalsIgnoreCase(value)) {
-                handler = new CreatePrincipalsAndKeytabsHandler(true, true, true);
+                handler = new CreatePrincipalsAndKeytabsHandler(true, updateConfigurations, true);
               } else if ("missing".equalsIgnoreCase(value)) {
-                handler = new CreatePrincipalsAndKeytabsHandler(false, true, true);
+                handler = new CreatePrincipalsAndKeytabsHandler(false, updateConfigurations, true);
               }
 
               if (handler != null) {
@@ -1482,7 +1485,7 @@ public class KerberosHelperImpl implements KerberosHelper {
                            Collection<KerberosIdentityDescriptor> identities,
                            Collection<String> identityFilter, String hostname, String serviceName,
                            String componentName, Map<String, Map<String, String>> kerberosConfigurations,
-                           Map<String, Map<String, String>> configurations)
+                           Map<String, Map<String, String>> configurations, boolean ignoreHeadless)
       throws IOException {
     int identitiesAdded = 0;
 
@@ -1534,7 +1537,8 @@ public class KerberosHelperImpl implements KerberosHelper {
                   keytabFileOwnerAccess,
                   keytabFileGroupName,
                   keytabFileGroupAccess,
-                  (keytabIsCachable) ? "true" : "false");
+                  (keytabIsCachable) ? "true" : "false",
+                  (ignoreHeadless && principalDescriptor.getType() == KerberosPrincipalType.USER) ? "true" : "false");
             }
 
             // Add the principal-related configuration to the map of configurations
@@ -2189,6 +2193,7 @@ public class KerberosHelperImpl implements KerberosHelper {
                   keytabFileOwnerAccess,
                   keytabFileGroupName,
                   keytabFileGroupAccess,
+                  "false",
                   "false");
 
               hostsWithValidKerberosClient.add(hostname);

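The new DIRECTIVE_IGNORE_CONFIGS handling is a small predicate over the request properties: configuration updates stay enabled unless the request carries ignore_config_updates=true. A minimal sketch of just that predicate, with the surrounding request plumbing omitted:

import java.util.HashMap;
import java.util.Map;

public class IgnoreConfigsDirectiveSketch {

  static final String DIRECTIVE_IGNORE_CONFIGS = "ignore_config_updates";

  // Mirrors the check added above: update configurations unless the
  // directive is present and set (case-insensitively) to "true".
  static boolean updateConfigurations(Map<String, String> requestProperties) {
    return !requestProperties.containsKey(DIRECTIVE_IGNORE_CONFIGS)
        || !"true".equalsIgnoreCase(requestProperties.get(DIRECTIVE_IGNORE_CONFIGS));
  }

  public static void main(String[] args) {
    Map<String, String> props = new HashMap<>();
    System.out.println(updateConfigurations(props));  // true (directive absent)
    props.put(DIRECTIVE_IGNORE_CONFIGS, "TRUE");
    System.out.println(updateConfigurations(props));  // false (configs ignored)
    props.put(DIRECTIVE_IGNORE_CONFIGS, "false");
    System.out.println(updateConfigurations(props));  // true
  }
}
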
http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/main/java/org/apache/ambari/server/controller/RepositoryResponse.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/RepositoryResponse.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/RepositoryResponse.java
index 792a166..8c68f41 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/RepositoryResponse.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/RepositoryResponse.java
@@ -26,6 +26,8 @@ public class RepositoryResponse {
   private String osType;
   private String repoId;
   private String repoName;
+  private String distribution;
+  private String components;
   private String mirrorsList;
   private String defaultBaseUrl;
   private Long repositoryVersionId;
@@ -34,11 +36,14 @@ public class RepositoryResponse {
   private boolean unique;
 
   public RepositoryResponse(String baseUrl, String osType, String repoId,
-                            String repoName, String mirrorsList, String defaultBaseUrl) {
+                            String repoName, String distribution, String components,
+                            String mirrorsList, String defaultBaseUrl) {
     setBaseUrl(baseUrl);
     setOsType(osType);
     setRepoId(repoId);
     setRepoName(repoName);
+    setDistribution(distribution);
+    setComponents(components);
     setMirrorsList(mirrorsList);
     setDefaultBaseUrl(defaultBaseUrl);
   }
@@ -97,6 +102,22 @@ public class RepositoryResponse {
     this.repoName = repoName;
   }
 
+  public String getDistribution() {
+    return distribution;
+  }
+
+  public void setDistribution(String distribution) {
+    this.distribution = distribution;
+  }
+
+  public String getComponents() {
+    return components;
+  }
+
+  public void setComponents(String components) {
+    this.components = components;
+  }
+
   public String getMirrorsList() {
     return mirrorsList;
   }

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/main/java/org/apache/ambari/server/controller/ResourceProviderFactory.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/ResourceProviderFactory.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/ResourceProviderFactory.java
index 3912138..0be7199 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/ResourceProviderFactory.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/ResourceProviderFactory.java
@@ -22,6 +22,7 @@ package org.apache.ambari.server.controller;
 import java.util.Map;
 import java.util.Set;
 
+import org.apache.ambari.server.controller.internal.ClusterStackVersionResourceProvider;
 import org.apache.ambari.server.controller.internal.UpgradeResourceProvider;
 import org.apache.ambari.server.controller.spi.Resource;
 import org.apache.ambari.server.controller.spi.Resource.Type;
@@ -68,4 +69,7 @@ public interface ResourceProviderFactory {
   @Named("upgrade")
   UpgradeResourceProvider getUpgradeResourceProvider(AmbariManagementController managementController);
 
+  @Named("clusterStackVersion")
+  ClusterStackVersionResourceProvider getClusterStackVersionResourceProvider(AmbariManagementController managementController);
+
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AbstractControllerResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AbstractControllerResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AbstractControllerResourceProvider.java
index b35b2a8..b4b13eb 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AbstractControllerResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AbstractControllerResourceProvider.java
@@ -183,7 +183,7 @@ public abstract class AbstractControllerResourceProvider extends AbstractAuthori
       case StackVersion:
         return new StackVersionResourceProvider(propertyIds, keyPropertyIds, managementController);
       case ClusterStackVersion:
-        return new ClusterStackVersionResourceProvider(managementController);
+        return resourceProviderFactory.getClusterStackVersionResourceProvider(managementController);
       case HostStackVersion:
         return new HostStackVersionResourceProvider(managementController);
       case StackService:

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AbstractProviderModule.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AbstractProviderModule.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AbstractProviderModule.java
index 82ff972..1cd2d10 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AbstractProviderModule.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AbstractProviderModule.java
@@ -86,10 +86,6 @@ public abstract class AbstractProviderModule implements ProviderModule,
   private static final int PROPERTY_REQUEST_CONNECT_TIMEOUT = 5000;
   private static final int PROPERTY_REQUEST_READ_TIMEOUT    = 10000;
 
-  private static final String CLUSTER_NAME_PROPERTY_ID                  = PropertyHelper.getPropertyId("Clusters", "cluster_name");
-  private static final String HOST_COMPONENT_CLUSTER_NAME_PROPERTY_ID   = PropertyHelper.getPropertyId("HostRoles", "cluster_name");
-  private static final String HOST_COMPONENT_HOST_NAME_PROPERTY_ID      = PropertyHelper.getPropertyId("HostRoles", "host_name");
-  private static final String HOST_COMPONENT_COMPONENT_NAME_PROPERTY_ID = PropertyHelper.getPropertyId("HostRoles", "component_name");
   private static final String GANGLIA_SERVER                            = "GANGLIA_SERVER";
   private static final String METRIC_SERVER                             = "METRICS_COLLECTOR";
   private static final String PROPERTIES_CATEGORY = "properties";
@@ -222,6 +218,11 @@ public abstract class AbstractProviderModule implements ProviderModule,
    */
   private final Map<Resource.Type, List<PropertyProvider>> propertyProviders = new HashMap<>();
 
+  /*
+   * TODO: Instantiation for the concrete impl of this class is not done through
+   * dependency injector (guice), so none of these field initializations
+   * will work until the refactoring is complete.
+   */
   @Inject
   AmbariManagementController managementController;
 
@@ -523,7 +524,6 @@ public abstract class AbstractProviderModule implements ProviderModule,
 
   @Override
   public String getPort(String clusterName, String componentName, String hostName, boolean httpsEnabled) throws SystemException {
-    // Parent map need not be synchronized
     ConcurrentMap<String, ConcurrentMap<String, String>> clusterJmxPorts;
     // Still need double check to ensure single init
     if (!jmxPortMap.containsKey(clusterName)) {
@@ -534,9 +534,7 @@ public abstract class AbstractProviderModule implements ProviderModule,
         }
       }
     }
-
     clusterJmxPorts = jmxPortMap.get(clusterName);
-
     Service.Type service = componentServiceMap.get(componentName);
 
     if (service != null) {
@@ -883,15 +881,14 @@ public abstract class AbstractProviderModule implements ProviderModule,
 
     for (Cluster cluster : clusterMap.values()) {
       String clusterName = cluster.getClusterName();
-
       Map<String, String> hostComponentMap = clusterHostComponentMap.get(clusterName);
+
       if (hostComponentMap == null) {
         hostComponentMap = new HashMap<>();
         clusterHostComponentMap.put(clusterName, hostComponentMap);
       }
 
       List<ServiceComponentHost> serviceComponentHosts = cluster.getServiceComponentHosts();
-
       if (!CollectionUtils.isEmpty(serviceComponentHosts)) {
         for (ServiceComponentHost sch : serviceComponentHosts) {
           String componentName = sch.getServiceComponentName();

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BlueprintConfigurationProcessor.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BlueprintConfigurationProcessor.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BlueprintConfigurationProcessor.java
index b4e1027..5a6e2cc 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BlueprintConfigurationProcessor.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BlueprintConfigurationProcessor.java
@@ -1298,12 +1298,16 @@ public class BlueprintConfigurationProcessor {
    *         elements in this property
    */
   private static String[] splitAndTrimStrings(String propertyName) {
-    List<String> namesWithoutWhitespace = new LinkedList<>();
-    for (String service : propertyName.split(",")) {
-      namesWithoutWhitespace.add(service.trim());
-    }
+    if (propertyName != null) {
+      List<String> namesWithoutWhitespace = new LinkedList<>();
+      for (String service : propertyName.split(",")) {
+        namesWithoutWhitespace.add(service.trim());
+      }
 
-    return namesWithoutWhitespace.toArray(new String[namesWithoutWhitespace.size()]);
+      return namesWithoutWhitespace.toArray(new String[namesWithoutWhitespace.size()]);
+    } else {
+      return new String[0];
+    }
   }
 
   /**
@@ -2726,7 +2730,7 @@ public class BlueprintConfigurationProcessor {
     atlasPropsMap.put("atlas.kafka.bootstrap.servers", new MultipleHostTopologyUpdater("KAFKA_BROKER"));
     atlasPropsMap.put("atlas.kafka.zookeeper.connect", new MultipleHostTopologyUpdater("ZOOKEEPER_SERVER"));
     atlasPropsMap.put("atlas.graph.index.search.solr.zookeeper-url", new MultipleHostTopologyUpdater("ZOOKEEPER_SERVER", ',', false, true, true));
-    atlasPropsMap.put("atlas.graph.storage.hostname", new MultipleHostTopologyUpdater("HBASE_MASTER"));
+    atlasPropsMap.put("atlas.graph.storage.hostname", new MultipleHostTopologyUpdater("ZOOKEEPER_SERVER"));
     atlasPropsMap.put("atlas.audit.hbase.zookeeper.quorum", new MultipleHostTopologyUpdater("ZOOKEEPER_SERVER"));
 
     // RANGER_ADMIN

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClientConfigResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClientConfigResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClientConfigResourceProvider.java
index 0ad967b..ab1f05c 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClientConfigResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClientConfigResourceProvider.java
@@ -445,6 +445,8 @@ public class ClientConfigResourceProvider extends AbstractControllerResourceProv
         jsonContent.put("hostname", hostName);
         jsonContent.put("public_hostname", publicHostName);
         jsonContent.put("clusterName", cluster.getClusterName());
+        jsonContent.put("serviceName", serviceName);
+        jsonContent.put("role", componentName);
         jsonConfigurations = gson.toJson(jsonContent);
 
         File tmpDirectory = new File(TMP_PATH);

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterStackVersionResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterStackVersionResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterStackVersionResourceProvider.java
index 4d5a4ac..ba5a4e7 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterStackVersionResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterStackVersionResourceProvider.java
@@ -41,13 +41,11 @@ import org.apache.ambari.server.actionmanager.ActionManager;
 import org.apache.ambari.server.actionmanager.RequestFactory;
 import org.apache.ambari.server.actionmanager.Stage;
 import org.apache.ambari.server.actionmanager.StageFactory;
-import org.apache.ambari.server.agent.stomp.MetadataHolder;
 import org.apache.ambari.server.api.services.AmbariMetaInfo;
 import org.apache.ambari.server.configuration.Configuration;
 import org.apache.ambari.server.controller.ActionExecutionContext;
 import org.apache.ambari.server.controller.AmbariActionExecutionHelper;
 import org.apache.ambari.server.controller.AmbariManagementController;
-import org.apache.ambari.server.controller.AmbariManagementControllerImpl;
 import org.apache.ambari.server.controller.RequestStatusResponse;
 import org.apache.ambari.server.controller.spi.NoSuchParentResourceException;
 import org.apache.ambari.server.controller.spi.NoSuchResourceException;
@@ -205,12 +203,6 @@ public class ClusterStackVersionResourceProvider extends AbstractControllerResou
   @Inject
   private static Provider<Clusters> clusters;
 
-  @Inject
-  private static Provider<MetadataHolder> m_metadataHolder;
-
-  @Inject
-  private static Provider<AmbariManagementControllerImpl> m_ambariManagementController;
-
   /**
    * Used for updating the existing stack tools with those of the stack being
    * distributed.
@@ -221,6 +213,7 @@ public class ClusterStackVersionResourceProvider extends AbstractControllerResou
   /**
    * Constructor.
    */
+  @Inject
   public ClusterStackVersionResourceProvider(
           AmbariManagementController managementController) {
     super(propertyIds, keyPropertyIds, managementController);
@@ -462,7 +455,7 @@ public class ClusterStackVersionResourceProvider extends AbstractControllerResou
     }
   }
 
-  @Transactional
+  @Transactional(rollbackOn = {RuntimeException.class, SystemException.class, AmbariException.class})
   RequestStatus createOrUpdateHostVersions(Cluster cluster,
       RepositoryVersionEntity repoVersionEntity, VersionDefinitionXml versionDefinitionXml,
       StackId stackId, boolean forceInstalled, Map<String, Object> propertyMap)
@@ -505,6 +498,7 @@ public class ClusterStackVersionResourceProvider extends AbstractControllerResou
       }
     }
 
+    checkPatchVDFAvailableServices(cluster, repoVersionEntity, versionDefinitionXml);
 
     // the cluster will create/update all of the host versions to the correct state
     List<Host> hostsNeedingInstallCommands = cluster.transitionHostsToInstalling(
@@ -603,9 +597,6 @@ public class ClusterStackVersionResourceProvider extends AbstractControllerResou
       // determine services for the repo
       Set<String> serviceNames = new HashSet<>();
 
-
-      checkPatchVDFAvailableServices(cluster, repoVersionEnt, desiredVersionDefinition);
-
       // !!! limit the serviceNames to those that are detailed for the repository.
       // TODO packages don't have component granularity
       if (RepositoryType.STANDARD != repoVersionEnt.getType()) {
@@ -676,11 +667,13 @@ public class ClusterStackVersionResourceProvider extends AbstractControllerResou
     }
   }
 
-  private ActionExecutionContext getHostVersionInstallCommand(RepositoryVersionEntity repoVersion,
+  @Transactional
+  ActionExecutionContext getHostVersionInstallCommand(RepositoryVersionEntity repoVersion,
       Cluster cluster, AmbariManagementController managementController, AmbariMetaInfo ami,
       final StackId stackId, Set<String> repoServices, Stage stage1, Host host)
           throws SystemException {
 
+
     // Determine repositories for host
     String osFamily = host.getOsFamily();
 
@@ -694,7 +687,7 @@ public class ClusterStackVersionResourceProvider extends AbstractControllerResou
 
     if (null == osEntity || CollectionUtils.isEmpty(osEntity.getRepositories())) {
       throw new SystemException(String.format("Repositories for os type %s are " +
-          "not defined. Repo version=%s, stackId=%s",
+          "not defined for version %s of Stack %s.",
             osFamily, repoVersion.getVersion(), stackId));
     }