Posted to commits@ambari.apache.org by mp...@apache.org on 2017/10/11 08:38:41 UTC

[3/7] ambari git commit: AMBARI-22190. After merging trunk to branch-3.0-perf some parts of code are missing. (mpapirkovskyy)

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ClusterStackVersionResourceProviderTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ClusterStackVersionResourceProviderTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ClusterStackVersionResourceProviderTest.java
index 654067b..a1415703 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ClusterStackVersionResourceProviderTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ClusterStackVersionResourceProviderTest.java
@@ -31,7 +31,6 @@ import static org.easymock.EasyMock.verify;
 
 import java.io.File;
 import java.io.FileInputStream;
-import java.lang.reflect.Field;
 import java.sql.SQLException;
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -68,6 +67,7 @@ import org.apache.ambari.server.controller.spi.Predicate;
 import org.apache.ambari.server.controller.spi.Request;
 import org.apache.ambari.server.controller.spi.RequestStatus;
 import org.apache.ambari.server.controller.spi.Resource;
+import org.apache.ambari.server.controller.spi.Resource.Type;
 import org.apache.ambari.server.controller.spi.ResourceProvider;
 import org.apache.ambari.server.controller.utilities.PredicateBuilder;
 import org.apache.ambari.server.controller.utilities.PropertyHelper;
@@ -117,6 +117,7 @@ import org.springframework.security.core.Authentication;
 import org.springframework.security.core.context.SecurityContextHolder;
 
 import com.google.common.collect.ImmutableMap;
+import com.google.common.collect.Lists;
 import com.google.common.collect.Sets;
 import com.google.gson.JsonArray;
 import com.google.gson.JsonObject;
@@ -124,9 +125,10 @@ import com.google.gson.JsonParser;
 import com.google.inject.AbstractModule;
 import com.google.inject.Guice;
 import com.google.inject.Injector;
-import com.google.inject.Provider;
 import com.google.inject.util.Modules;
 
+import junit.framework.AssertionFailedError;
+
 
  /**
  * ClusterStackVersionResourceProvider tests.
@@ -144,6 +146,10 @@ public class ClusterStackVersionResourceProviderTest {
   private HostVersionDAO hostVersionDAO;
   private HostComponentStateDAO hostComponentStateDAO;
 
+  private Clusters clusters;
+  private ActionManager actionManager;
+  private AmbariManagementController managementController;
+
   public static final String OS_JSON = "[\n" +
           "   {\n" +
           "      \"repositories\":[\n" +
@@ -177,6 +183,10 @@ public class ClusterStackVersionResourceProviderTest {
     configuration = new Configuration(properties);
     stageFactory = createNiceMock(StageFactory.class);
 
+    clusters = createNiceMock(Clusters.class);
+    actionManager = createNiceMock(ActionManager.class);
+    managementController = createMock(AmbariManagementController.class);
+
     // Initialize injector
     injector = Guice.createInjector(Modules.override(inMemoryModule).with(new MockModule()));
     injector.getInstance(GuiceJpaInitializer.class);
@@ -206,10 +216,6 @@ public class ClusterStackVersionResourceProviderTest {
   }
 
   private void testCreateResources(Authentication authentication) throws Exception {
-    Resource.Type type = Resource.Type.ClusterStackVersion;
-
-    AmbariManagementController managementController = createMock(AmbariManagementController.class);
-    Clusters clusters = createNiceMock(Clusters.class);
     Cluster cluster = createNiceMock(Cluster.class);
     Map<String, String> hostLevelParams = new HashMap<>();
     StackId stackId = new StackId("HDP", "2.0.1");
@@ -256,28 +262,15 @@ public class ClusterStackVersionResourceProviderTest {
     expect(schAMS.getServiceName()).andReturn("AMBARI_METRICS").anyTimes();
     expect(schAMS.getServiceComponentName()).andReturn("METRICS_COLLECTOR").anyTimes();
     // First host contains versionable components
-    final List<ServiceComponentHost> schsH1 = new ArrayList<ServiceComponentHost>(){{
-      add(schDatanode);
-      add(schNamenode);
-      add(schAMS);
-    }};
+    final List<ServiceComponentHost> schsH1 = Lists.newArrayList(schDatanode, schNamenode, schAMS);
     // Second host does not contain versionable components
-    final List<ServiceComponentHost> schsH2 = new ArrayList<ServiceComponentHost>(){{
-      add(schAMS);
-    }};
-
+    final List<ServiceComponentHost> schsH2 = Lists.newArrayList(schAMS);
 
     ServiceOsSpecific.Package hdfsPackage = new ServiceOsSpecific.Package();
     hdfsPackage.setName("hdfs");
     List<ServiceOsSpecific.Package> packages = Collections.singletonList(hdfsPackage);
 
-    ActionManager actionManager = createNiceMock(ActionManager.class);
-
     RequestStatusResponse response = createNiceMock(RequestStatusResponse.class);
-    ResourceProviderFactory resourceProviderFactory = createNiceMock(ResourceProviderFactory.class);
-    ResourceProvider csvResourceProvider = createNiceMock(ClusterStackVersionResourceProvider.class);
-
-    AbstractControllerResourceProvider.init(resourceProviderFactory);
 
     Map<String, Map<String, String>> hostConfigTags = new HashMap<>();
     expect(configHelper.getEffectiveDesiredTags(anyObject(ClusterImpl.class), anyObject(String.class))).andReturn(hostConfigTags);
@@ -288,12 +281,10 @@ public class ClusterStackVersionResourceProviderTest {
     expect(managementController.getActionManager()).andReturn(actionManager).anyTimes();
     expect(managementController.getJdkResourceUrl()).andReturn("/JdkResourceUrl").anyTimes();
     expect(managementController.getPackagesForServiceHost(anyObject(ServiceInfo.class),
-            EasyMock.anyObject(), anyObject(String.class))).
-            andReturn(packages).times((hostCount - 1) * 2); // 1 host has no versionable components, other hosts have 2 services
-//            // that's why we don't send commands to it
-
-    expect(resourceProviderFactory.getHostResourceProvider(EasyMock.anyObject(), EasyMock.anyObject(),
-            eq(managementController))).andReturn(csvResourceProvider).anyTimes();
+            EasyMock.<Map<String, String>>anyObject(), anyObject(String.class))).
+            andReturn(packages).anyTimes();
+    expect(managementController.findConfigurationTagsWithOverrides(anyObject(Cluster.class), EasyMock.anyString()))
+      .andReturn(new HashMap<String, Map<String, String>>()).anyTimes();
 
     expect(clusters.getCluster(anyObject(String.class))).andReturn(cluster);
     expect(clusters.getHostsForCluster(anyObject(String.class))).andReturn(
@@ -355,18 +346,14 @@ public class ClusterStackVersionResourceProviderTest {
     StageUtils.setTopologyManager(injector.getInstance(TopologyManager.class));
     StageUtils.setConfiguration(injector.getInstance(Configuration.class));
 
+    ResourceProvider provider = createProvider(managementController);
+    injector.injectMembers(provider);
+
     // replay
-    replay(managementController, response, clusters, resourceProviderFactory, csvResourceProvider,
+    replay(managementController, response, clusters,
             cluster, repositoryVersionDAOMock, configHelper, schDatanode, schNamenode, schAMS, actionManager,
             executionCommand, executionCommandWrapper,stage, stageFactory);
 
-    ResourceProvider provider = AbstractControllerResourceProvider.getResourceProvider(
-        type,
-        PropertyHelper.getPropertyIds(type),
-        PropertyHelper.getKeyPropertyIds(type),
-        managementController);
-
-    injector.injectMembers(provider);
 
     // add the property map to a set for the request.  add more maps for multiple creates
     Set<Map<String, Object>> propertySet = new LinkedHashSet<>();
@@ -616,10 +603,6 @@ public class ClusterStackVersionResourceProviderTest {
    }
 
    private void testCreateResourcesWithRepoDefinition(Authentication authentication) throws Exception {
-    Resource.Type type = Resource.Type.ClusterStackVersion;
-
-    AmbariManagementController managementController = createMock(AmbariManagementController.class);
-    Clusters clusters = createNiceMock(Clusters.class);
     Cluster cluster = createNiceMock(Cluster.class);
     StackId stackId = new StackId("HDP", "2.0.1");
 
@@ -704,7 +687,7 @@ public class ClusterStackVersionResourceProviderTest {
 
     RequestStatusResponse response = createNiceMock(RequestStatusResponse.class);
     ResourceProviderFactory resourceProviderFactory = createNiceMock(ResourceProviderFactory.class);
-    ResourceProvider csvResourceProvider = createNiceMock(ClusterStackVersionResourceProvider.class);
+    ResourceProvider csvResourceProvider = createNiceMock(ResourceProvider.class);
 
     AbstractControllerResourceProvider.init(resourceProviderFactory);
 
@@ -723,6 +706,10 @@ public class ClusterStackVersionResourceProviderTest {
     expect(resourceProviderFactory.getHostResourceProvider(EasyMock.anyObject(), EasyMock.anyObject(),
             eq(managementController))).andReturn(csvResourceProvider).anyTimes();
 
+    expect(managementController.findConfigurationTagsWithOverrides(anyObject(Cluster.class), EasyMock.anyString()))
+      .andReturn(new HashMap<String, Map<String, String>>()).anyTimes();
+
+
     expect(clusters.getCluster(anyObject(String.class))).andReturn(cluster);
     expect(clusters.getHostsForCluster(anyObject(String.class))).andReturn(
         hostsForCluster).anyTimes();
@@ -794,12 +781,7 @@ public class ClusterStackVersionResourceProviderTest {
             cluster, repositoryVersionDAOMock, configHelper, schDatanode, schNamenode, schHBM, actionManager,
             executionCommandWrapper,stage, stageFactory);
 
-    ResourceProvider provider = AbstractControllerResourceProvider.getResourceProvider(
-        type,
-        PropertyHelper.getPropertyIds(type),
-        PropertyHelper.getKeyPropertyIds(type),
-        managementController);
-
+    ResourceProvider provider = createProvider(managementController);
     injector.injectMembers(provider);
 
     // add the property map to a set for the request.  add more maps for multiple creates
@@ -857,10 +839,6 @@ public class ClusterStackVersionResourceProviderTest {
 
     String os_json = json.toString();
 
-    Resource.Type type = Resource.Type.ClusterStackVersion;
-
-    AmbariManagementController managementController = createMock(AmbariManagementController.class);
-    Clusters clusters = createNiceMock(Clusters.class);
     Cluster cluster = createNiceMock(Cluster.class);
     StackId stackId = new StackId("HDP", "2.0.1");
 
@@ -944,10 +922,7 @@ public class ClusterStackVersionResourceProviderTest {
     ActionManager actionManager = createNiceMock(ActionManager.class);
 
     RequestStatusResponse response = createNiceMock(RequestStatusResponse.class);
-    ResourceProviderFactory resourceProviderFactory = createNiceMock(ResourceProviderFactory.class);
-    ResourceProvider csvResourceProvider = createNiceMock(ClusterStackVersionResourceProvider.class);
-
-    AbstractControllerResourceProvider.init(resourceProviderFactory);
+    ResourceProvider csvResourceProvider = createNiceMock(ResourceProvider.class);
 
     Map<String, Map<String, String>> hostConfigTags = new HashMap<>();
     expect(configHelper.getEffectiveDesiredTags(anyObject(ClusterImpl.class), anyObject(String.class))).andReturn(hostConfigTags);
@@ -961,8 +936,8 @@ public class ClusterStackVersionResourceProviderTest {
             EasyMock.anyObject(), anyObject(String.class))).
             andReturn(packages).anyTimes(); // only one host has the versionable component
 
-    expect(resourceProviderFactory.getHostResourceProvider(EasyMock.anyObject(), EasyMock.anyObject(),
-            eq(managementController))).andReturn(csvResourceProvider).anyTimes();
+    expect(managementController.findConfigurationTagsWithOverrides(anyObject(Cluster.class), EasyMock.anyString()))
+    .andReturn(new HashMap<String, Map<String, String>>()).anyTimes();
 
     expect(clusters.getCluster(anyObject(String.class))).andReturn(cluster);
     expect(clusters.getHostsForCluster(anyObject(String.class))).andReturn(
@@ -990,11 +965,9 @@ public class ClusterStackVersionResourceProviderTest {
     expect(cluster.transitionHostsToInstalling(anyObject(RepositoryVersionEntity.class),
         anyObject(VersionDefinitionXml.class), eq(false))).andReturn(hostsNeedingInstallCommands).atLeastOnce();
 
-//    ExecutionCommand executionCommand = createNiceMock(ExecutionCommand.class);
     ExecutionCommand executionCommand = new ExecutionCommand();
     ExecutionCommandWrapper executionCommandWrapper = createNiceMock(ExecutionCommandWrapper.class);
 
-//    expect(executionCommand.getHostLevelParams()).andReturn(new HashMap<String, String>()).atLeastOnce();
     expect(executionCommandWrapper.getExecutionCommand()).andReturn(executionCommand).anyTimes();
 
     Stage stage = createNiceMock(Stage.class);
@@ -1029,16 +1002,11 @@ public class ClusterStackVersionResourceProviderTest {
     StageUtils.setConfiguration(injector.getInstance(Configuration.class));
 
     // replay
-    replay(managementController, response, clusters, hdfsService, hbaseService, resourceProviderFactory, csvResourceProvider,
+    replay(managementController, response, clusters, hdfsService, hbaseService, csvResourceProvider,
             cluster, repositoryVersionDAOMock, configHelper, schDatanode, schNamenode, schHBM, actionManager,
             executionCommandWrapper,stage, stageFactory);
 
-    ResourceProvider provider = AbstractControllerResourceProvider.getResourceProvider(
-        type,
-        PropertyHelper.getPropertyIds(type),
-        PropertyHelper.getKeyPropertyIds(type),
-        managementController);
-
+    ResourceProvider provider = createProvider(managementController);
     injector.injectMembers(provider);
 
     // add the property map to a set for the request.  add more maps for multiple creates
@@ -1094,10 +1062,6 @@ public class ClusterStackVersionResourceProviderTest {
    }
 
    private void testCreateResourcesMixed(Authentication authentication) throws Exception {
-    Resource.Type type = Resource.Type.ClusterStackVersion;
-
-    AmbariManagementController managementController = createMock(AmbariManagementController.class);
-    Clusters clusters = createNiceMock(Clusters.class);
     Cluster cluster = createNiceMock(Cluster.class);
     Map<String, String> hostLevelParams = new HashMap<>();
     StackId stackId = new StackId("HDP", "2.0.1");
@@ -1162,16 +1126,9 @@ public class ClusterStackVersionResourceProviderTest {
     expect(schAMS.getServiceName()).andReturn("AMBARI_METRICS").anyTimes();
     expect(schAMS.getServiceComponentName()).andReturn("METRICS_COLLECTOR").anyTimes();
     // First host contains versionable components
-    final List<ServiceComponentHost> schsH1 = new ArrayList<ServiceComponentHost>(){{
-      add(schDatanode);
-      add(schNamenode);
-      add(schAMS);
-    }};
+    final List<ServiceComponentHost> schsH1 = Lists.newArrayList(schDatanode, schNamenode, schAMS);
     // Second host does not contain versionable components
-    final List<ServiceComponentHost> schsH2 = new ArrayList<ServiceComponentHost>(){{
-      add(schAMS);
-    }};
-
+    final List<ServiceComponentHost> schsH2 = Lists.newArrayList(schAMS);
 
     ServiceOsSpecific.Package hdfsPackage = new ServiceOsSpecific.Package();
     hdfsPackage.setName("hdfs");
@@ -1180,10 +1137,6 @@ public class ClusterStackVersionResourceProviderTest {
     ActionManager actionManager = createNiceMock(ActionManager.class);
 
     RequestStatusResponse response = createNiceMock(RequestStatusResponse.class);
-    ResourceProviderFactory resourceProviderFactory = createNiceMock(ResourceProviderFactory.class);
-    ResourceProvider csvResourceProvider = createNiceMock(ClusterStackVersionResourceProvider.class);
-
-    AbstractControllerResourceProvider.init(resourceProviderFactory);
 
     Map<String, Map<String, String>> hostConfigTags = new HashMap<>();
     expect(configHelper.getEffectiveDesiredTags(anyObject(ClusterImpl.class), anyObject(String.class))).andReturn(hostConfigTags);
@@ -1194,12 +1147,11 @@ public class ClusterStackVersionResourceProviderTest {
     expect(managementController.getActionManager()).andReturn(actionManager).anyTimes();
     expect(managementController.getJdkResourceUrl()).andReturn("/JdkResourceUrl").anyTimes();
     expect(managementController.getPackagesForServiceHost(anyObject(ServiceInfo.class),
-            EasyMock.anyObject(), anyObject(String.class))).
-            andReturn(packages).times((hostCount - 1) * 2); // 1 host has no versionable components, other hosts have 2 services
-//            // that's why we don't send commands to it
+            EasyMock.<Map<String, String>>anyObject(), anyObject(String.class))).
+            andReturn(packages).anyTimes();
 
-    expect(resourceProviderFactory.getHostResourceProvider(EasyMock.anyObject(), EasyMock.anyObject(),
-            eq(managementController))).andReturn(csvResourceProvider).anyTimes();
+    expect(managementController.findConfigurationTagsWithOverrides(anyObject(Cluster.class), EasyMock.anyString()))
+      .andReturn(new HashMap<String, Map<String, String>>()).anyTimes();
 
     expect(clusters.getCluster(anyObject(String.class))).andReturn(cluster);
     expect(clusters.getHostsForCluster(anyObject(String.class))).andReturn(
@@ -1265,16 +1217,11 @@ public class ClusterStackVersionResourceProviderTest {
 
 
     // replay
-    replay(managementController, response, clusters, resourceProviderFactory, csvResourceProvider,
+    replay(managementController, response, clusters,
             cluster, repositoryVersionDAOMock, configHelper, schDatanode, schNamenode, schAMS, actionManager,
             executionCommand, executionCommandWrapper,stage, stageFactory);
 
-    ResourceProvider provider = AbstractControllerResourceProvider.getResourceProvider(
-        type,
-        PropertyHelper.getPropertyIds(type),
-        PropertyHelper.getKeyPropertyIds(type),
-        managementController);
-
+    ResourceProvider provider = createProvider(managementController);
     injector.injectMembers(provider);
 
     // add the property map to a set for the request.  add more maps for multiple creates
@@ -1327,10 +1274,6 @@ public class ClusterStackVersionResourceProviderTest {
    */
   @Test
   public void testCreateResourcesInInstalledState() throws Exception {
-    Resource.Type type = Resource.Type.ClusterStackVersion;
-
-    AmbariManagementController managementController = createMock(AmbariManagementController.class);
-    Clusters clusters = createNiceMock(Clusters.class);
     Cluster cluster = createNiceMock(Cluster.class);
     StackId stackId = new StackId("HDP", "2.2.0");
     String repoVersion = "2.2.0.1-885";
@@ -1406,8 +1349,7 @@ public class ClusterStackVersionResourceProviderTest {
 
     RequestStatusResponse response = createNiceMock(RequestStatusResponse.class);
     ResourceProviderFactory resourceProviderFactory = createNiceMock(ResourceProviderFactory.class);
-    ResourceProvider csvResourceProvider = createNiceMock(
-        ClusterStackVersionResourceProvider.class);
+    ResourceProvider csvResourceProvider = createNiceMock(ResourceProvider.class);
 
     AbstractControllerResourceProvider.init(resourceProviderFactory);
 
@@ -1452,10 +1394,7 @@ public class ClusterStackVersionResourceProviderTest {
         csvResourceProvider, cluster, repositoryVersionDAOMock, configHelper, schDatanode,
         stageFactory, hostVersionDAO);
 
-    ResourceProvider provider = AbstractControllerResourceProvider.getResourceProvider(type,
-        PropertyHelper.getPropertyIds(type), PropertyHelper.getKeyPropertyIds(type),
-        managementController);
-
+    ResourceProvider provider = createProvider(managementController);
     injector.injectMembers(provider);
 
     // add the property map to a set for the request. add more maps for multiple
@@ -1499,10 +1438,6 @@ public class ClusterStackVersionResourceProviderTest {
 
   @Test
   public void testCreateResourcesPPC() throws Exception {
-    Resource.Type type = Resource.Type.ClusterStackVersion;
-
-    AmbariManagementController managementController = createMock(AmbariManagementController.class);
-    Clusters clusters = createNiceMock(Clusters.class);
     Cluster cluster = createNiceMock(Cluster.class);
     Map<String, String> hostLevelParams = new HashMap<>();
     StackId stackId = new StackId("HDP", "2.0.1");
@@ -1565,16 +1500,9 @@ public class ClusterStackVersionResourceProviderTest {
     expect(schAMS.getServiceName()).andReturn("AMBARI_METRICS").anyTimes();
     expect(schAMS.getServiceComponentName()).andReturn("METRICS_COLLECTOR").anyTimes();
     // First host contains versionable components
-    final List<ServiceComponentHost> schsH1 = new ArrayList<ServiceComponentHost>(){{
-      add(schDatanode);
-      add(schNamenode);
-      add(schAMS);
-    }};
+    final List<ServiceComponentHost> schsH1 = Lists.newArrayList(schDatanode, schNamenode, schAMS);
     // Second host does not contain versionable components
-    final List<ServiceComponentHost> schsH2 = new ArrayList<ServiceComponentHost>(){{
-      add(schAMS);
-    }};
-
+    final List<ServiceComponentHost> schsH2 = Lists.newArrayList(schAMS);
 
     ServiceOsSpecific.Package hdfsPackage = new ServiceOsSpecific.Package();
     hdfsPackage.setName("hdfs");
@@ -1598,8 +1526,11 @@ public class ClusterStackVersionResourceProviderTest {
     expect(managementController.getJdkResourceUrl()).andReturn("/JdkResourceUrl").anyTimes();
     expect(managementController.getPackagesForServiceHost(anyObject(ServiceInfo.class),
             (Map<String, String>) anyObject(List.class), anyObject(String.class))).
-            andReturn(packages).anyTimes(); // 1 host has no versionable components, other hosts have 2 services
-  //            // that's why we don't send commands to it
+            andReturn(packages).anyTimes();
+
+    expect(managementController.findConfigurationTagsWithOverrides(anyObject(Cluster.class), EasyMock.anyString()))
+      .andReturn(new HashMap<String, Map<String, String>>()).anyTimes();
+
 
     expect(resourceProviderFactory.getHostResourceProvider(anyObject(Set.class), anyObject(Map.class),
             eq(managementController))).andReturn(csvResourceProvider).anyTimes();
@@ -1670,12 +1601,7 @@ public class ClusterStackVersionResourceProviderTest {
             cluster, repoVersion, repositoryVersionDAOMock, configHelper, schDatanode, schNamenode, schAMS, actionManager,
             executionCommand, executionCommandWrapper,stage, stageFactory);
 
-    ResourceProvider provider = AbstractControllerResourceProvider.getResourceProvider(
-        type,
-        PropertyHelper.getPropertyIds(type),
-        PropertyHelper.getKeyPropertyIds(type),
-        managementController);
-
+    ResourceProvider provider = createProvider(managementController);
     injector.injectMembers(provider);
 
     // add the property map to a set for the request.  add more maps for multiple creates
@@ -1709,10 +1635,6 @@ public class ClusterStackVersionResourceProviderTest {
 
   @Test
   public void testGetSorted() throws Exception {
-
-    Resource.Type type = Resource.Type.ClusterStackVersion;
-
-    final Clusters clusters = createNiceMock(Clusters.class);
     Cluster cluster = createNiceMock(Cluster.class);
     StackId stackId = new StackId("HDP", "2.2.0");
 
@@ -1764,19 +1686,17 @@ public class ClusterStackVersionResourceProviderTest {
         csvResourceProvider, cluster, repositoryVersionDAOMock, configHelper,
         stageFactory, hostVersionDAO);
 
-    ResourceProvider provider = AbstractControllerResourceProvider.getResourceProvider(type,
-        PropertyHelper.getPropertyIds(type), PropertyHelper.getKeyPropertyIds(type),
-        /*managementController*/null);
+    ResourceProvider provider = createProvider(managementController);
     injector.injectMembers(provider);
 
-    Field field = ClusterStackVersionResourceProvider.class.getDeclaredField("clusters");
-    field.setAccessible(true);
-    field.set(null, new Provider<Clusters>() {
-      @Override
-      public Clusters get() {
-        return clusters;
-      }
-    });
+//    Field field = ClusterStackVersionResourceProvider.class.getDeclaredField("clusters");
+//    field.setAccessible(true);
+//    field.set(null, new Provider<Clusters>() {
+//      @Override
+//      public Clusters get() {
+//        return clusters;
+//      }
+//    });
 
     // set the security auth
     SecurityContextHolder.getContext().setAuthentication(
@@ -1852,6 +1772,9 @@ public class ClusterStackVersionResourceProviderTest {
 
     expect(desiredVersionDefinition.getAvailableServices((StackInfo)EasyMock.anyObject())).andReturn(availableServices).once();
 
+    expect(cluster.transitionHostsToInstalling(
+        anyObject(RepositoryVersionEntity.class), anyObject(VersionDefinitionXml.class),
+        EasyMock.anyBoolean())).andReturn(Collections.<Host>emptyList()).atLeastOnce();
 
     replay(cluster, repoVersionEnt, desiredVersionDefinition, service1, service2, availableService1, availableService2);
 
@@ -1890,7 +1813,9 @@ public class ClusterStackVersionResourceProviderTest {
      availableServices.add(availableService2);
 
      expect(desiredVersionDefinition.getAvailableServices((StackInfo)EasyMock.anyObject())).andReturn(availableServices).once();
-
+     expect(cluster.transitionHostsToInstalling(
+         anyObject(RepositoryVersionEntity.class), anyObject(VersionDefinitionXml.class),
+         EasyMock.anyBoolean())).andThrow(new AssertionFailedError()).anyTimes();
 
      replay(cluster, repoVersionEnt, desiredVersionDefinition, service1, availableService1, availableService2);
 
@@ -1906,10 +1831,6 @@ public class ClusterStackVersionResourceProviderTest {
    }
 
    private void testCreateResourcesExistingUpgrade(Authentication authentication) throws Exception {
-    Resource.Type type = Resource.Type.ClusterStackVersion;
-
-    AmbariManagementController managementController = createMock(AmbariManagementController.class);
-    Clusters clusters = createNiceMock(Clusters.class);
     Cluster cluster = createNiceMock(Cluster.class);
 
     expect(managementController.getClusters()).andReturn(clusters).anyTimes();
@@ -1927,12 +1848,7 @@ public class ClusterStackVersionResourceProviderTest {
     // replay
     replay(managementController, clusters, cluster);
 
-    ResourceProvider provider = AbstractControllerResourceProvider.getResourceProvider(
-        type,
-        PropertyHelper.getPropertyIds(type),
-        PropertyHelper.getKeyPropertyIds(type),
-        managementController);
-
+    ResourceProvider provider = createProvider(managementController);
     injector.injectMembers(provider);
 
     // add the property map to a set for the request.  add more maps for multiple creates
@@ -1964,6 +1880,17 @@ public class ClusterStackVersionResourceProviderTest {
     verify(cluster);
 
   }
+
+   private ClusterStackVersionResourceProvider createProvider(AmbariManagementController amc) {
+     ResourceProviderFactory factory = injector.getInstance(ResourceProviderFactory.class);
+     AbstractControllerResourceProvider.init(factory);
+
+     Resource.Type type = Type.ClusterStackVersion;
+     return (ClusterStackVersionResourceProvider) AbstractControllerResourceProvider.getResourceProvider(type,
+         PropertyHelper.getPropertyIds(type), PropertyHelper.getKeyPropertyIds(type),
+         amc);
+   }
+
   private class MockModule extends AbstractModule {
     @Override
     protected void configure() {
@@ -1973,6 +1900,9 @@ public class ClusterStackVersionResourceProviderTest {
       bind(StageFactory.class).toInstance(stageFactory);
       bind(HostVersionDAO.class).toInstance(hostVersionDAO);
       bind(HostComponentStateDAO.class).toInstance(hostComponentStateDAO);
+      bind(Clusters.class).toInstance(clusters);
+      bind(ActionManager.class).toInstance(actionManager);
+      bind(AmbariManagementController.class).toInstance(managementController);
     }
   }
 

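The change above drops the per-test AmbariManagementController/Clusters/ActionManager mocks and the reflection-based wiring in favor of field-level mocks bound through the Guice MockModule, and replaces the double-brace ArrayList initializers with Lists.newArrayList(...). A minimal sketch of the mock-binding pattern, assuming only Guice and EasyMock on the classpath (Collaborator is a hypothetical stand-in for Clusters, ActionManager or AmbariManagementController, not Ambari code):

    import static org.easymock.EasyMock.createNiceMock;

    import com.google.inject.AbstractModule;
    import com.google.inject.Guice;
    import com.google.inject.Injector;

    public class MockBindingSketch {

      // Hypothetical collaborator interface the class under test depends on.
      public interface Collaborator {}

      // Created once as a field so the test and the module share the same instance.
      private final Collaborator collaborator = createNiceMock(Collaborator.class);

      private class TestModule extends AbstractModule {
        @Override
        protected void configure() {
          // Same idea as bind(Clusters.class).toInstance(clusters) in the diff.
          bind(Collaborator.class).toInstance(collaborator);
        }
      }

      public Injector newInjector() {
        // The provider under test then receives the mock via injectMembers().
        return Guice.createInjector(new TestModule());
      }
    }
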
http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ComponentResourceProviderTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ComponentResourceProviderTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ComponentResourceProviderTest.java
index 0ced822..1ea4b9a 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ComponentResourceProviderTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ComponentResourceProviderTest.java
@@ -217,6 +217,7 @@ public class ComponentResourceProviderTest {
     Map <String, Integer> serviceComponentStateCountMap = new HashMap<>();
     serviceComponentStateCountMap.put("startedCount", 1);
     serviceComponentStateCountMap.put("installedCount", 0);
+    serviceComponentStateCountMap.put("installedAndMaintenanceOffCount", 0);
     serviceComponentStateCountMap.put("installFailedCount", 0);
     serviceComponentStateCountMap.put("initCount", 0);
     serviceComponentStateCountMap.put("unknownCount", 1);
@@ -280,6 +281,7 @@ public class ComponentResourceProviderTest {
     propertyIds.add(ComponentResourceProvider.COMPONENT_TOTAL_COUNT_PROPERTY_ID);
     propertyIds.add(ComponentResourceProvider.COMPONENT_STARTED_COUNT_PROPERTY_ID);
     propertyIds.add(ComponentResourceProvider.COMPONENT_INSTALLED_COUNT_PROPERTY_ID);
+    propertyIds.add(ComponentResourceProvider.COMPONENT_INSTALLED_AND_MAINTENANCE_OFF_COUNT_PROPERTY_ID);
     propertyIds.add(ComponentResourceProvider.COMPONENT_INSTALL_FAILED_COUNT_PROPERTY_ID);
     propertyIds.add(ComponentResourceProvider.COMPONENT_INIT_COUNT_PROPERTY_ID);
     propertyIds.add(ComponentResourceProvider.COMPONENT_UNKNOWN_COUNT_PROPERTY_ID);
@@ -311,6 +313,8 @@ public class ComponentResourceProviderTest {
       Assert.assertEquals(0, resource.getPropertyValue(
         ComponentResourceProvider.COMPONENT_INSTALLED_COUNT_PROPERTY_ID));
       Assert.assertEquals(0, resource.getPropertyValue(
+        ComponentResourceProvider.COMPONENT_INSTALLED_AND_MAINTENANCE_OFF_COUNT_PROPERTY_ID));
+      Assert.assertEquals(0, resource.getPropertyValue(
           ComponentResourceProvider.COMPONENT_INSTALL_FAILED_COUNT_PROPERTY_ID));
       Assert.assertEquals(0, resource.getPropertyValue(
           ComponentResourceProvider.COMPONENT_INIT_COUNT_PROPERTY_ID));
@@ -379,6 +383,7 @@ public class ComponentResourceProviderTest {
     Map <String, Integer> serviceComponentStateCountMap = new HashMap<>();
     serviceComponentStateCountMap.put("startedCount", 0);
     serviceComponentStateCountMap.put("installedCount", 1);
+    serviceComponentStateCountMap.put("installedAndMaintenanceOffCount", 0);
     serviceComponentStateCountMap.put("installFailedCount", 0);
     serviceComponentStateCountMap.put("initCount", 0);
     serviceComponentStateCountMap.put("unknownCount", 0);
@@ -691,6 +696,7 @@ public class ComponentResourceProviderTest {
     Map <String, Integer> serviceComponentStateCountMap = new HashMap<>();
     serviceComponentStateCountMap.put("startedCount", 0);
     serviceComponentStateCountMap.put("installedCount", 1);
+    serviceComponentStateCountMap.put("installedAndMaintenanceOffCount", 0);
     serviceComponentStateCountMap.put("installFailedCount", 0);
     serviceComponentStateCountMap.put("initCount", 0);
     serviceComponentStateCountMap.put("unknownCount", 0);

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/RepositoryResourceProviderTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/RepositoryResourceProviderTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/RepositoryResourceProviderTest.java
index 6a0ab89..ca5cde0 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/RepositoryResourceProviderTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/RepositoryResourceProviderTest.java
@@ -48,13 +48,15 @@ public class RepositoryResourceProviderTest {
   private static final String VAL_REPO_ID = "HDP-0.2";
   private static final String VAL_REPO_NAME = "HDP1";
   private static final String VAL_BASE_URL = "http://foo.com";
+  private static final String VAL_DISTRIBUTION = "mydist";
+  private static final String VAL_COMPONENT_NAME = "mycomponentname";
 
   @Test
   public void testGetResources() throws Exception{
     AmbariManagementController managementController = EasyMock.createMock(AmbariManagementController.class);
 
     RepositoryResponse rr = new RepositoryResponse(VAL_BASE_URL, VAL_OS,
-        VAL_REPO_ID, VAL_REPO_NAME, null, null);
+        VAL_REPO_ID, VAL_REPO_NAME, VAL_DISTRIBUTION, VAL_COMPONENT_NAME, null, null);
     rr.setStackName(VAL_STACK_NAME);
     rr.setStackVersion(VAL_STACK_VERSION);
     Set<RepositoryResponse> allResponse = new HashSet<>();
@@ -76,6 +78,8 @@ public class RepositoryResourceProviderTest {
     propertyIds.add(RepositoryResourceProvider.REPOSITORY_OS_TYPE_PROPERTY_ID);
     propertyIds.add(RepositoryResourceProvider.REPOSITORY_REPO_ID_PROPERTY_ID);
     propertyIds.add(RepositoryResourceProvider.REPOSITORY_CLUSTER_STACK_VERSION_PROPERTY_ID);
+    propertyIds.add(RepositoryResourceProvider.REPOSITORY_DISTRIBUTION_PROPERTY_ID);
+    propertyIds.add(RepositoryResourceProvider.REPOSITORY_COMPONENTS_PROPERTY_ID);
 
     Predicate predicate =
         new PredicateBuilder().property(RepositoryResourceProvider.REPOSITORY_STACK_NAME_PROPERTY_ID).equals(VAL_STACK_NAME)
@@ -111,6 +115,12 @@ public class RepositoryResourceProviderTest {
 
       o = resource.getPropertyValue(RepositoryResourceProvider.REPOSITORY_CLUSTER_STACK_VERSION_PROPERTY_ID);
       Assert.assertNull(o);
+
+      o = resource.getPropertyValue(RepositoryResourceProvider.REPOSITORY_DISTRIBUTION_PROPERTY_ID);
+      Assert.assertEquals(o, VAL_DISTRIBUTION);
+
+      o = resource.getPropertyValue(RepositoryResourceProvider.REPOSITORY_COMPONENTS_PROPERTY_ID);
+      Assert.assertEquals(o, VAL_COMPONENT_NAME);
     }
 
     // !!! check that the stack version id is returned
@@ -139,6 +149,12 @@ public class RepositoryResourceProviderTest {
 
       o = resource.getPropertyValue(RepositoryResourceProvider.REPOSITORY_CLUSTER_STACK_VERSION_PROPERTY_ID);
       Assert.assertEquals(525L, o);
+
+      o = resource.getPropertyValue(RepositoryResourceProvider.REPOSITORY_DISTRIBUTION_PROPERTY_ID);
+      Assert.assertEquals(o, VAL_DISTRIBUTION);
+
+      o = resource.getPropertyValue(RepositoryResourceProvider.REPOSITORY_COMPONENTS_PROPERTY_ID);
+      Assert.assertEquals(o, VAL_COMPONENT_NAME);
     }
 
     // verify
@@ -152,7 +168,7 @@ public class RepositoryResourceProviderTest {
     AmbariManagementController managementController = EasyMock.createMock(AmbariManagementController.class);
 
     RepositoryResponse rr = new RepositoryResponse(VAL_BASE_URL, VAL_OS,
-        VAL_REPO_ID, VAL_REPO_NAME, null, null);
+        VAL_REPO_ID, VAL_REPO_NAME, null, null, null, null);
     Set<RepositoryResponse> allResponse = new HashSet<>();
     allResponse.add(rr);
 

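One nit on the assertions added above: JUnit's Assert.assertEquals signature is (expected, actual), while the new checks pass the actual property value first, so a failure message would report the two values under swapped labels. A self-contained sketch of the conventional order (the names are illustrative, not Ambari code):

    import static org.junit.Assert.assertEquals;

    import org.junit.Test;

    public class AssertOrderSketch {

      // Mirrors the VAL_DISTRIBUTION constant introduced in the diff.
      private static final String VAL_DISTRIBUTION = "mydist";

      @Test
      public void expectedValueGoesFirst() {
        Object o = "mydist"; // stands in for resource.getPropertyValue(...)
        // assertEquals(expected, actual) keeps failure output readable.
        assertEquals(VAL_DISTRIBUTION, o);
      }
    }
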
http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/StackUpgradeConfigurationMergeTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/StackUpgradeConfigurationMergeTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/StackUpgradeConfigurationMergeTest.java
index bb3fa8f..f13aeed 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/StackUpgradeConfigurationMergeTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/StackUpgradeConfigurationMergeTest.java
@@ -33,6 +33,7 @@ import org.apache.ambari.server.actionmanager.HostRoleCommandFactory;
 import org.apache.ambari.server.actionmanager.HostRoleCommandFactoryImpl;
 import org.apache.ambari.server.actionmanager.RequestFactory;
 import org.apache.ambari.server.actionmanager.StageFactory;
+import org.apache.ambari.server.api.services.AmbariMetaInfo;
 import org.apache.ambari.server.controller.AbstractRootServiceResponseFactory;
 import org.apache.ambari.server.controller.AmbariManagementController;
 import org.apache.ambari.server.controller.KerberosHelper;
@@ -56,8 +57,10 @@ import org.apache.ambari.server.state.Config;
 import org.apache.ambari.server.state.ConfigFactory;
 import org.apache.ambari.server.state.ConfigHelper;
 import org.apache.ambari.server.state.DesiredConfig;
+import org.apache.ambari.server.state.PropertyInfo;
 import org.apache.ambari.server.state.RepositoryType;
 import org.apache.ambari.server.state.Service;
+import org.apache.ambari.server.state.ServiceComponent;
 import org.apache.ambari.server.state.ServiceComponentFactory;
 import org.apache.ambari.server.state.ServiceComponentHostFactory;
 import org.apache.ambari.server.state.ServiceFactory;
@@ -65,6 +68,7 @@ import org.apache.ambari.server.state.StackId;
 import org.apache.ambari.server.state.UpgradeContext;
 import org.apache.ambari.server.state.UpgradeContextFactory;
 import org.apache.ambari.server.state.UpgradeHelper;
+import org.apache.ambari.server.state.ValueAttributesInfo;
 import org.apache.ambari.server.state.configgroup.ConfigGroupFactory;
 import org.apache.ambari.server.state.scheduler.RequestExecutionFactory;
 import org.apache.ambari.server.state.stack.OsFamily;
@@ -95,12 +99,15 @@ import com.google.inject.assistedinject.FactoryModuleBuilder;
 public class StackUpgradeConfigurationMergeTest extends EasyMockSupport {
 
   private Injector m_injector;
+  private AmbariMetaInfo m_metainfo;
 
   /**
    * @throws Exception
    */
   @Before
   public void before() throws Exception {
+    m_metainfo = createNiceMock(AmbariMetaInfo.class);
+
     MockModule mockModule = new MockModule();
 
     // create an injector which will inject the mocks
@@ -286,6 +293,134 @@ public class StackUpgradeConfigurationMergeTest extends EasyMockSupport {
     assertEquals("stack-220-original", expectedBarType.get("bar-property-2"));
   }
 
+  /**
+   * Tests that any read-only properties are not taken from the existing
+   * configs, but from the new stack value.
+   *
+   * @throws Exception
+   */
+  @Test
+  public void testReadOnlyPropertyIsTakenFromTargetStack() throws Exception {
+    RepositoryVersionEntity repoVersion211 = createNiceMock(RepositoryVersionEntity.class);
+    RepositoryVersionEntity repoVersion220 = createNiceMock(RepositoryVersionEntity.class);
+
+    StackId stack211 = new StackId("HDP-2.1.1");
+    StackId stack220 = new StackId("HDP-2.2.0");
+
+    String version211 = "2.1.1.0-1234";
+    String version220 = "2.2.0.0-1234";
+
+    expect(repoVersion211.getStackId()).andReturn(stack211).atLeastOnce();
+    expect(repoVersion211.getVersion()).andReturn(version211).atLeastOnce();
+
+    expect(repoVersion220.getStackId()).andReturn(stack220).atLeastOnce();
+    expect(repoVersion220.getVersion()).andReturn(version220).atLeastOnce();
+
+    String fooSite = "foo-site";
+    String fooPropertyName = "foo-property-1";
+    String serviceName = "ZOOKEEPER";
+
+    Map<String, Map<String, String>> stack211Configs = new HashMap<>();
+    Map<String, String> stack211FooType = new HashMap<>();
+    stack211Configs.put(fooSite, stack211FooType);
+    stack211FooType.put(fooPropertyName, "stack-211-original");
+
+    Map<String, Map<String, String>> stack220Configs = new HashMap<>();
+    Map<String, String> stack220FooType = new HashMap<>();
+    stack220Configs.put(fooSite, stack220FooType);
+    stack220FooType.put(fooPropertyName, "stack-220-original");
+
+    PropertyInfo readOnlyProperty = new PropertyInfo();
+    ValueAttributesInfo valueAttributesInfo = new ValueAttributesInfo();
+    valueAttributesInfo.setReadOnly(true);
+    readOnlyProperty.setName(fooPropertyName);
+    readOnlyProperty.setFilename(fooSite + ".xml");
+    readOnlyProperty.setPropertyValueAttributes(null);
+    readOnlyProperty.setPropertyValueAttributes(valueAttributesInfo);
+
+    expect(m_metainfo.getServiceProperties(stack211.getStackName(), stack211.getStackVersion(),
+        serviceName)).andReturn(Sets.newHashSet(readOnlyProperty)).atLeastOnce();
+
+    Map<String, String> existingFooType = new HashMap<>();
+
+    ClusterConfigEntity fooConfigEntity = createNiceMock(ClusterConfigEntity.class);
+
+    expect(fooConfigEntity.getType()).andReturn(fooSite);
+
+    Config fooConfig = createNiceMock(Config.class);
+
+    existingFooType.put(fooPropertyName, "my-foo-property-1");
+
+    expect(fooConfig.getType()).andReturn(fooSite).atLeastOnce();
+    expect(fooConfig.getProperties()).andReturn(existingFooType);
+
+    Map<String, DesiredConfig> desiredConfigurations = new HashMap<>();
+    desiredConfigurations.put(fooSite, null);
+
+    Service zookeeper = createNiceMock(Service.class);
+    expect(zookeeper.getName()).andReturn(serviceName).atLeastOnce();
+    expect(zookeeper.getServiceComponents()).andReturn(new HashMap<String, ServiceComponent>()).once();
+    zookeeper.setDesiredRepositoryVersion(repoVersion220);
+    expectLastCall().once();
+
+    Cluster cluster = createNiceMock(Cluster.class);
+    expect(cluster.getCurrentStackVersion()).andReturn(stack211).atLeastOnce();
+    expect(cluster.getDesiredStackVersion()).andReturn(stack220);
+    expect(cluster.getDesiredConfigs()).andReturn(desiredConfigurations);
+    expect(cluster.getDesiredConfigByType(fooSite)).andReturn(fooConfig);
+    expect(cluster.getService(serviceName)).andReturn(zookeeper);
+
+    ConfigHelper configHelper = m_injector.getInstance(ConfigHelper.class);
+
+    expect(configHelper.getDefaultProperties(stack211, serviceName)).andReturn(stack211Configs).anyTimes();
+    expect(configHelper.getDefaultProperties(stack220, serviceName)).andReturn(stack220Configs).anyTimes();
+
+    Capture<Map<String, Map<String, String>>> expectedConfigurationsCapture = EasyMock.newCapture();
+
+    configHelper.createConfigTypes(EasyMock.anyObject(Cluster.class),
+        EasyMock.anyObject(StackId.class), EasyMock.anyObject(AmbariManagementController.class),
+        EasyMock.capture(expectedConfigurationsCapture), EasyMock.anyObject(String.class),
+        EasyMock.anyObject(String.class));
+
+    expectLastCall().once();
+
+    // mock the service config DAO and replay it
+    ServiceConfigEntity zookeeperServiceConfig = createNiceMock(ServiceConfigEntity.class);
+    expect(zookeeperServiceConfig.getClusterConfigEntities()).andReturn(
+        Lists.newArrayList(fooConfigEntity));
+
+    ServiceConfigDAO serviceConfigDAOMock = m_injector.getInstance(ServiceConfigDAO.class);
+    List<ServiceConfigEntity> latestServiceConfigs = Lists.newArrayList(zookeeperServiceConfig);
+    expect(serviceConfigDAOMock.getLastServiceConfigsForService(EasyMock.anyLong(),
+        eq(serviceName))).andReturn(latestServiceConfigs).once();
+
+    UpgradeContext context = createNiceMock(UpgradeContext.class);
+    expect(context.getCluster()).andReturn(cluster).atLeastOnce();
+    expect(context.getType()).andReturn(UpgradeType.ROLLING).atLeastOnce();
+    expect(context.getDirection()).andReturn(Direction.UPGRADE).atLeastOnce();
+    expect(context.getRepositoryVersion()).andReturn(repoVersion220).anyTimes();
+    expect(context.getSupportedServices()).andReturn(Sets.newHashSet(serviceName)).atLeastOnce();
+    expect(context.getSourceRepositoryVersion(EasyMock.anyString())).andReturn(repoVersion211).atLeastOnce();
+    expect(context.getTargetRepositoryVersion(EasyMock.anyString())).andReturn(repoVersion220).atLeastOnce();
+    expect(context.getOrchestrationType()).andReturn(RepositoryType.STANDARD).anyTimes();
+    expect(context.getHostRoleCommandFactory()).andStubReturn(m_injector.getInstance(HostRoleCommandFactory.class));
+    expect(context.getRoleGraphFactory()).andStubReturn(m_injector.getInstance(RoleGraphFactory.class));
+
+    replayAll();
+
+    UpgradeHelper upgradeHelper = m_injector.getInstance(UpgradeHelper.class);
+    upgradeHelper.updateDesiredRepositoriesAndConfigs(context);
+
+    Map<String, Map<String, String>> expectedConfigurations = expectedConfigurationsCapture.getValue();
+    Map<String, String> expectedFooType = expectedConfigurations.get(fooSite);
+
+    // The read-only foo property must be taken from the target (2.2.0) stack
+    // defaults, not from the value customized on the cluster.
+    assertEquals(1, expectedConfigurations.size());
+    assertEquals(1, expectedFooType.size());
+
+    assertEquals("stack-220-original", expectedFooType.get(fooPropertyName));
+  }
 
   private class MockModule implements Module {
 
@@ -325,6 +460,7 @@ public class StackUpgradeConfigurationMergeTest extends EasyMockSupport {
       binder.bind(ServiceConfigDAO.class).toInstance(createNiceMock(ServiceConfigDAO.class));
       binder.install(new FactoryModuleBuilder().build(UpgradeContextFactory.class));
       binder.bind(HostRoleCommandFactory.class).to(HostRoleCommandFactoryImpl.class);
+      binder.bind(AmbariMetaInfo.class).toInstance(m_metainfo);
 
       binder.requestStaticInjection(UpgradeResourceProvider.class);
     }

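The new testReadOnlyPropertyIsTakenFromTargetStack asserts that a property whose ValueAttributesInfo is marked read-only ends up with the target stack's default ("stack-220-original") rather than the value customized on the cluster. A plain-Java sketch of that merge rule, written as a hypothetical helper rather than Ambari's UpgradeHelper implementation:

    import java.util.HashMap;
    import java.util.Map;
    import java.util.Set;

    public final class ReadOnlyMergeSketch {

      private ReadOnlyMergeSketch() {
      }

      public static Map<String, String> merge(Map<String, String> targetStackDefaults,
          Map<String, String> existingClusterValues, Set<String> readOnlyProperties) {
        // Start from the target stack defaults ...
        Map<String, String> merged = new HashMap<>(targetStackDefaults);
        for (Map.Entry<String, String> entry : existingClusterValues.entrySet()) {
          // ... and let customized values survive only when the property is not read-only.
          if (!readOnlyProperties.contains(entry.getKey())) {
            merged.put(entry.getKey(), entry.getValue());
          }
        }
        return merged;
      }
    }
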
http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UpgradeResourceProviderTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UpgradeResourceProviderTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UpgradeResourceProviderTest.java
index 37a7b44..fea56d9 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UpgradeResourceProviderTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UpgradeResourceProviderTest.java
@@ -84,6 +84,7 @@ import org.apache.ambari.server.orm.entities.UpgradeHistoryEntity;
 import org.apache.ambari.server.orm.entities.UpgradeItemEntity;
 import org.apache.ambari.server.security.TestAuthenticationFactory;
 import org.apache.ambari.server.serveraction.upgrades.AutoSkipFailedSummaryAction;
+import org.apache.ambari.server.serveraction.upgrades.ConfigureAction;
 import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.Clusters;
 import org.apache.ambari.server.state.Config;
@@ -1733,6 +1734,113 @@ public class UpgradeResourceProviderTest extends EasyMockSupport {
     upgradeResourceProvider.createResources(request);
   }
 
+  @Test
+  public void testCreatePatchWithConfigChanges() throws Exception {
+    Cluster cluster = clusters.getCluster("c1");
+
+    File f = new File("src/test/resources/version_definition_test_patch_config.xml");
+    repoVersionEntity2112.setType(RepositoryType.PATCH);
+    repoVersionEntity2112.setVersionXml(IOUtils.toString(new FileInputStream(f)));
+    repoVersionEntity2112.setVersionXsd("version_definition.xsd");
+    repoVersionDao.merge(repoVersionEntity2112);
+
+    List<UpgradeEntity> upgrades = upgradeDao.findUpgrades(cluster.getClusterId());
+    assertEquals(0, upgrades.size());
+
+    Map<String, Object> requestProps = new HashMap<>();
+    requestProps.put(UpgradeResourceProvider.UPGRADE_CLUSTER_NAME, "c1");
+    requestProps.put(UpgradeResourceProvider.UPGRADE_REPO_VERSION_ID, String.valueOf(repoVersionEntity2112.getId()));
+    requestProps.put(UpgradeResourceProvider.UPGRADE_PACK, "upgrade_test");
+    requestProps.put(UpgradeResourceProvider.UPGRADE_SKIP_PREREQUISITE_CHECKS, "true");
+    requestProps.put(UpgradeResourceProvider.UPGRADE_DIRECTION, Direction.UPGRADE.name());
+
+    // !!! test that a PATCH upgrade skips config changes
+    ResourceProvider upgradeResourceProvider = createProvider(amc);
+
+    Request request = PropertyHelper.getCreateRequest(Collections.singleton(requestProps), null);
+    RequestStatus status = upgradeResourceProvider.createResources(request);
+    Set<Resource> resources = status.getAssociatedResources();
+    assertEquals(1, resources.size());
+    Long requestId = (Long) resources.iterator().next().getPropertyValue("Upgrade/request_id");
+    assertNotNull(requestId);
+
+    UpgradeEntity upgradeEntity = upgradeDao.findUpgradeByRequestId(requestId);
+    assertEquals(RepositoryType.PATCH, upgradeEntity.getOrchestration());
+
+    HostRoleCommandDAO hrcDAO = injector.getInstance(HostRoleCommandDAO.class);
+    List<HostRoleCommandEntity> commands = hrcDAO.findByRequest(upgradeEntity.getRequestId());
+
+    boolean foundConfigTask = false;
+    for (HostRoleCommandEntity command : commands) {
+      if (StringUtils.isNotBlank(command.getCustomCommandName()) &&
+          command.getCustomCommandName().equals(ConfigureAction.class.getName())) {
+        foundConfigTask = true;
+        break;
+      }
+    }
+    assertFalse(foundConfigTask);
+
+    // !!! test that a patch with a supported patch change gets picked up
+    cluster.setUpgradeEntity(null);
+    requestProps.put(UpgradeResourceProvider.UPGRADE_PACK, "upgrade_test_force_config_change");
+    request = PropertyHelper.getCreateRequest(Collections.singleton(requestProps), null);
+
+    status = upgradeResourceProvider.createResources(request);
+    resources = status.getAssociatedResources();
+    assertEquals(1, resources.size());
+    requestId = (Long) resources.iterator().next().getPropertyValue("Upgrade/request_id");
+    assertNotNull(requestId);
+
+    upgradeEntity = upgradeDao.findUpgradeByRequestId(requestId);
+    assertEquals(RepositoryType.PATCH, upgradeEntity.getOrchestration());
+
+    commands = hrcDAO.findByRequest(upgradeEntity.getRequestId());
+
+    foundConfigTask = false;
+    for (HostRoleCommandEntity command : commands) {
+      if (StringUtils.isNotBlank(command.getCustomCommandName()) &&
+          command.getCustomCommandName().equals(ConfigureAction.class.getName())) {
+        foundConfigTask = true;
+        break;
+      }
+    }
+    assertTrue(foundConfigTask);
+
+
+
+    // !!! test that a regular upgrade will pick up the config change
+    cluster.setUpgradeEntity(null);
+    repoVersionEntity2112.setType(RepositoryType.STANDARD);
+    repoVersionDao.merge(repoVersionEntity2112);
+
+    requestProps.put(UpgradeResourceProvider.UPGRADE_PACK, "upgrade_test");
+    request = PropertyHelper.getCreateRequest(Collections.singleton(requestProps), null);
+
+    status = upgradeResourceProvider.createResources(request);
+    resources = status.getAssociatedResources();
+    assertEquals(1, resources.size());
+    requestId = (Long) resources.iterator().next().getPropertyValue("Upgrade/request_id");
+    assertNotNull(requestId);
+
+    upgradeEntity = upgradeDao.findUpgradeByRequestId(requestId);
+    assertEquals(RepositoryType.STANDARD, upgradeEntity.getOrchestration());
+
+    commands = hrcDAO.findByRequest(upgradeEntity.getRequestId());
+
+    foundConfigTask = false;
+    for (HostRoleCommandEntity command : commands) {
+      if (StringUtils.isNotBlank(command.getCustomCommandName()) &&
+          command.getCustomCommandName().equals(ConfigureAction.class.getName())) {
+        foundConfigTask = true;
+        break;
+      }
+    }
+    assertTrue(foundConfigTask);
+
+  }
+
+
+
   private String parseSingleMessage(String msgStr){
     JsonParser parser = new JsonParser();
     JsonArray msgArray = (JsonArray) parser.parse(msgStr);

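testCreatePatchWithConfigChanges runs the same scan over the request's commands three times to decide whether a ConfigureAction task was scheduled; the loop could be factored into one helper. A minimal sketch using a hypothetical Command stand-in for HostRoleCommandEntity:

    import java.util.Arrays;
    import java.util.List;

    public final class ConfigTaskScanSketch {

      private ConfigTaskScanSketch() {
      }

      // Hypothetical stand-in for HostRoleCommandEntity#getCustomCommandName().
      public interface Command {
        String getCustomCommandName();
      }

      public static boolean containsCustomCommand(List<? extends Command> commands, String name) {
        for (Command command : commands) {
          // equals() on the expected name also guards against null command names.
          if (name.equals(command.getCustomCommandName())) {
            return true;
          }
        }
        return false;
      }

      public static void main(String[] args) {
        List<Command> commands = Arrays.asList(() -> "ConfigureAction", () -> null);
        // Mirrors the assertTrue(foundConfigTask) checks in the test above.
        System.out.println(containsCustomCommand(commands, "ConfigureAction")); // true
      }
    }
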
http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/test/java/org/apache/ambari/server/events/listeners/upgrade/StackVersionListenerTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/events/listeners/upgrade/StackVersionListenerTest.java b/ambari-server/src/test/java/org/apache/ambari/server/events/listeners/upgrade/StackVersionListenerTest.java
index b066324..ffacab9 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/events/listeners/upgrade/StackVersionListenerTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/events/listeners/upgrade/StackVersionListenerTest.java
@@ -306,6 +306,11 @@ public class StackVersionListenerTest extends EasyMockSupport {
     RepositoryVersionDAO dao = createNiceMock(RepositoryVersionDAO.class);
     RepositoryVersionEntity entity = createNiceMock(RepositoryVersionEntity.class);
     expect(entity.getVersion()).andReturn("2.4.0.0").once();
+
+    // when the version gets reported back, we set this repo to resolved
+    entity.setResolved(true);
+    expectLastCall().once();
+
     expect(dao.findByPK(1L)).andReturn(entity).once();
     expect(dao.merge(entity)).andReturn(entity).once();
 
@@ -325,6 +330,47 @@ public class StackVersionListenerTest extends EasyMockSupport {
   }
 
   /**
+   * Tests that if a component advertises a version and the repository already
+   * matches, that we ensure that it is marked as resolved.
+   *
+   * @throws Exception
+   */
+  @Test
+  public void testRepositoryResolvedWhenVersionsMatch() throws Exception {
+    String version = "2.4.0.0";
+
+    expect(sch.getVersion()).andReturn(version);
+    expect(componentInfo.isVersionAdvertised()).andReturn(true).once();
+
+    RepositoryVersionDAO dao = createNiceMock(RepositoryVersionDAO.class);
+    RepositoryVersionEntity entity = createNiceMock(RepositoryVersionEntity.class);
+    expect(entity.getVersion()).andReturn(version).once();
+    expect(entity.isResolved()).andReturn(false).once();
+
+    // when the version gets reported back, we set this repo to resolved
+    entity.setResolved(true);
+    expectLastCall().once();
+
+    expect(dao.findByPK(1L)).andReturn(entity).once();
+    expect(dao.merge(entity)).andReturn(entity).once();
+
+    replayAll();
+
+    String newVersion = version;
+
+    HostComponentVersionAdvertisedEvent event = new HostComponentVersionAdvertisedEvent(cluster, sch, newVersion, 1L);
+
+    // !!! avoid injector for test class
+    Field field = StackVersionListener.class.getDeclaredField("repositoryVersionDAO");
+    field.setAccessible(true);
+    field.set(listener, dao);
+
+    listener.onAmbariEvent(event);
+
+    verifyAll();
+  }
+
+  /**
    * Tests that the {@link RepositoryVersionEntity} is not updated if there is
    * an upgrade, even if the repo ID is passed back and the versions don't
    * match.

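The new testRepositoryResolvedWhenVersionsMatch drops the mocked RepositoryVersionDAO into the listener by reflection instead of going through the injector. A self-contained sketch of that pattern (Listener and its dao field are hypothetical stand-ins for StackVersionListener and repositoryVersionDAO):

    import java.lang.reflect.Field;

    public final class FieldInjectionSketch {

      // Hypothetical class under test with a private dependency.
      static class Listener {
        private Runnable dao;

        void onEvent() {
          dao.run();
        }
      }

      public static void main(String[] args) throws Exception {
        Listener listener = new Listener();
        Runnable fakeDao = () -> System.out.println("dao called");

        // Same idea as field.set(listener, dao) in the test: bypass the injector
        // and place the mock straight into the private field.
        Field field = Listener.class.getDeclaredField("dao");
        field.setAccessible(true);
        field.set(listener, fakeDao);

        listener.onEvent(); // prints "dao called"
      }
    }
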
http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/test/java/org/apache/ambari/server/scheduler/ExecutionScheduleManagerTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/scheduler/ExecutionScheduleManagerTest.java b/ambari-server/src/test/java/org/apache/ambari/server/scheduler/ExecutionScheduleManagerTest.java
index 8f587be..bc1ab47 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/scheduler/ExecutionScheduleManagerTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/scheduler/ExecutionScheduleManagerTest.java
@@ -32,7 +32,6 @@ import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertThat;
 
 import java.lang.reflect.InvocationTargetException;
-import java.lang.reflect.Method;
 import java.util.ArrayList;
 import java.util.Date;
 import java.util.HashMap;
@@ -87,6 +86,8 @@ import com.google.inject.Injector;
 import com.google.inject.Module;
 import com.google.inject.persist.Transactional;
 import com.google.inject.util.Modules;
+import com.sun.jersey.api.client.Client;
+import com.sun.jersey.api.client.WebResource;
 
 import junit.framework.Assert;
 
@@ -640,15 +641,35 @@ public class ExecutionScheduleManagerTest {
   }
 
   @Test
-  public void testCompleteRelativePath() throws NoSuchMethodException, InvocationTargetException, IllegalAccessException {
-    ExecutionScheduleManager scheduleManagerMock = createMock(ExecutionScheduleManager.class);
-    Method completeRelativeUri = ExecutionScheduleManager.class.getDeclaredMethod("completeRelativeUri", String.class);
-    completeRelativeUri.setAccessible(true);
-
-    assertEquals("api/v1/clusters", completeRelativeUri.invoke(scheduleManagerMock, "clusters"));
-    assertEquals("api/v1/clusters", completeRelativeUri.invoke(scheduleManagerMock, "/clusters"));
-    assertEquals("/api/v1/clusters", completeRelativeUri.invoke(scheduleManagerMock, "/api/v1/clusters"));
-    assertEquals("api/v1/clusters", completeRelativeUri.invoke(scheduleManagerMock, "api/v1/clusters"));
-    assertEquals("", completeRelativeUri.invoke(scheduleManagerMock, ""));
+  public void testExtendApiResource() throws NoSuchMethodException, InvocationTargetException, IllegalAccessException {
+    WebResource webResource = Client.create().resource("http://localhost:8080/");
+
+    String clustersEndpoint = "http://localhost:8080/api/v1/clusters";
+
+    Clusters clustersMock = createMock(Clusters.class);
+
+    Configuration configurationMock = createNiceMock(Configuration.class);
+    ExecutionScheduler executionSchedulerMock = createMock(ExecutionScheduler.class);
+    InternalTokenStorage tokenStorageMock = createMock(InternalTokenStorage.class);
+    ActionDBAccessor actionDBAccessorMock = createMock(ActionDBAccessor.class);
+    Gson gson = new Gson();
+
+    replay(clustersMock, configurationMock, executionSchedulerMock, tokenStorageMock,
+      actionDBAccessorMock);
+
+    ExecutionScheduleManager scheduleManager =
+      new ExecutionScheduleManager(configurationMock, executionSchedulerMock,
+        tokenStorageMock, clustersMock, actionDBAccessorMock, gson);
+
+    assertEquals(clustersEndpoint,
+      scheduleManager.extendApiResource(webResource, "clusters").getURI().toString());
+    assertEquals(clustersEndpoint,
+      scheduleManager.extendApiResource(webResource, "/clusters").getURI().toString());
+    assertEquals(clustersEndpoint,
+      scheduleManager.extendApiResource(webResource, "/api/v1/clusters").getURI().toString());
+    assertEquals(clustersEndpoint,
+      scheduleManager.extendApiResource(webResource, "api/v1/clusters").getURI().toString());
+    assertEquals("http://localhost:8080/",
+      scheduleManager.extendApiResource(webResource, "").getURI().toString());
   }
 }
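
The assertions above fully specify the path normalization expected from extendApiResource. A minimal hypothetical equivalent, shown only to illustrate that rule (it is not the Ambari implementation; the helper name is invented and only the standard Jersey 1.x WebResource.path() call is assumed):

  // Illustrative sketch: prefix relative paths with api/v1, pass through paths
  // that already carry the prefix, and leave the empty path untouched.
  private WebResource extendApiResourceSketch(WebResource webResource, String relativeUri) {
    String path = relativeUri.startsWith("/") ? relativeUri.substring(1) : relativeUri;
    if (!path.isEmpty() && !path.startsWith("api/v1")) {
      path = "api/v1/" + path;
    }
    return path.isEmpty() ? webResource : webResource.path(path);
  }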

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/AbstractPrepareKerberosServerActionTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/AbstractPrepareKerberosServerActionTest.java b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/AbstractPrepareKerberosServerActionTest.java
index 95e5513..8ff5ad2 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/AbstractPrepareKerberosServerActionTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/AbstractPrepareKerberosServerActionTest.java
@@ -149,7 +149,7 @@ public class AbstractPrepareKerberosServerActionTest {
       identityFilter,
       "",
         configurations, kerberosConfigurations,
-        false, propertiesToIgnore);
+        false, propertiesToIgnore, false);
 
     verify(kerberosHelper);
   }

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/KerberosIdentityDataFileTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/KerberosIdentityDataFileTest.java b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/KerberosIdentityDataFileTest.java
index 323ba8e..03727d7 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/KerberosIdentityDataFileTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/KerberosIdentityDataFileTest.java
@@ -54,7 +54,7 @@ public class KerberosIdentityDataFileTest {
           "principal" + i, "principal_type" + i, "keytabFilePath" + i,
           "keytabFileOwnerName" + i, "keytabFileOwnerAccess" + i,
           "keytabFileGroupName" + i, "keytabFileGroupAccess" + i,
-          "false");
+          "false", "false");
     }
 
     // Add some odd characters
@@ -62,7 +62,7 @@ public class KerberosIdentityDataFileTest {
         "principal", "principal_type", "keytabFilePath",
         "'keytabFileOwnerName'", "<keytabFileOwnerAccess>",
         "\"keytabFileGroupName\"", "keytab,File,Group,Access",
-        "false");
+        "false", "false");
 
     writer.close();
     Assert.assertTrue(writer.isClosed());
@@ -153,7 +153,7 @@ public class KerberosIdentityDataFileTest {
         "principal", "principal_type", "keytabFilePath",
         "keytabFileOwnerName", "keytabFileOwnerAccess",
         "keytabFileGroupName", "keytabFileGroupAccess",
-        "true");
+        "true", "false");
 
     writer.close();
     Assert.assertTrue(writer.isClosed());
@@ -179,7 +179,7 @@ public class KerberosIdentityDataFileTest {
         "principal", "principal_type", "keytabFilePath",
         "keytabFileOwnerName", "keytabFileOwnerAccess",
         "keytabFileGroupName", "keytabFileGroupAccess",
-        "true");
+        "true", "false");
 
     writer.close();
     Assert.assertTrue(writer.isClosed());
@@ -205,4 +205,4 @@ public class KerberosIdentityDataFileTest {
     Assert.assertEquals(0, i);
 
   }
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/KerberosServerActionTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/KerberosServerActionTest.java b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/KerberosServerActionTest.java
index f63e6b8..a43db4d 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/KerberosServerActionTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/KerberosServerActionTest.java
@@ -120,7 +120,7 @@ public class KerberosServerActionTest {
           "principal|_HOST|_REALM" + i, "principal_type", "keytabFilePath" + i,
           "keytabFileOwnerName" + i, "keytabFileOwnerAccess" + i,
           "keytabFileGroupName" + i, "keytabFileGroupAccess" + i,
-          "false");
+          "false", "false");
     }
     writer.close();
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/ComponentVersionCheckActionTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/ComponentVersionCheckActionTest.java b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/ComponentVersionCheckActionTest.java
index d07ac15..9f87312 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/ComponentVersionCheckActionTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/ComponentVersionCheckActionTest.java
@@ -36,7 +36,6 @@ import org.apache.ambari.server.actionmanager.HostRoleCommandFactory;
 import org.apache.ambari.server.actionmanager.HostRoleStatus;
 import org.apache.ambari.server.agent.CommandReport;
 import org.apache.ambari.server.agent.ExecutionCommand;
-import org.apache.ambari.server.api.services.AmbariMetaInfo;
 import org.apache.ambari.server.orm.GuiceJpaInitializer;
 import org.apache.ambari.server.orm.InMemoryDefaultTestModule;
 import org.apache.ambari.server.orm.OrmTestHelper;
@@ -56,7 +55,6 @@ import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.Clusters;
 import org.apache.ambari.server.state.ConfigFactory;
 import org.apache.ambari.server.state.Host;
-import org.apache.ambari.server.state.RepositoryInfo;
 import org.apache.ambari.server.state.RepositoryVersionState;
 import org.apache.ambari.server.state.Service;
 import org.apache.ambari.server.state.ServiceComponent;
@@ -308,12 +306,6 @@ public class ComponentVersionCheckActionTest {
 
     makeUpgradeCluster(sourceStack, sourceRepo, targetStack, targetRepo);
 
-    // Verify the repo before calling Finalize
-    AmbariMetaInfo metaInfo = m_injector.getInstance(AmbariMetaInfo.class);
-
-    RepositoryInfo repo = metaInfo.getRepository(sourceStack.getStackName(), sourceStack.getStackVersion(), "redhat6", sourceStack.getStackId());
-    assertEquals(HDP_211_CENTOS6_REPO_URL, repo.getBaseUrl());
-
     // Finalize the upgrade
     Map<String, String> commandParams = new HashMap<>();
     ExecutionCommand executionCommand = new ExecutionCommand();
@@ -470,12 +462,6 @@ public class ComponentVersionCheckActionTest {
     sch = createNewServiceComponentHost(cluster, "ZOOKEEPER", "ZOOKEEPER_SERVER", "h1");
     sch.setVersion(HDP_2_1_1_1);
 
-    // Verify the repo before calling Finalize
-    AmbariMetaInfo metaInfo = m_injector.getInstance(AmbariMetaInfo.class);
-
-    RepositoryInfo repo = metaInfo.getRepository(sourceStack.getStackName(), sourceStack.getStackVersion(), "redhat6", sourceStack.getStackId());
-    assertEquals(HDP_211_CENTOS6_REPO_URL, repo.getBaseUrl());
-
     // Finalize the upgrade
     Map<String, String> commandParams = new HashMap<>();
     ExecutionCommand executionCommand = new ExecutionCommand();

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/RangerUsersyncConfigCalculationTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/RangerUsersyncConfigCalculationTest.java b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/RangerUsersyncConfigCalculationTest.java
new file mode 100644
index 0000000..61ca682
--- /dev/null
+++ b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/RangerUsersyncConfigCalculationTest.java
@@ -0,0 +1,126 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.server.serveraction.upgrades;
+
+import static org.easymock.EasyMock.anyObject;
+import static org.easymock.EasyMock.createMock;
+import static org.easymock.EasyMock.expect;
+import static org.easymock.EasyMock.expectLastCall;
+import static org.easymock.EasyMock.replay;
+
+import java.lang.reflect.Field;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.ambari.server.actionmanager.ExecutionCommandWrapper;
+import org.apache.ambari.server.actionmanager.HostRoleCommand;
+import org.apache.ambari.server.agent.CommandReport;
+import org.apache.ambari.server.agent.ExecutionCommand;
+import org.apache.ambari.server.state.Cluster;
+import org.apache.ambari.server.state.Clusters;
+import org.apache.ambari.server.state.Config;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+
+import com.google.inject.Injector;
+
+public class RangerUsersyncConfigCalculationTest {
+
+  private Injector m_injector;
+  private Clusters m_clusters;
+  private Field m_clusterField;
+
+  @Before
+  public void setup() throws Exception {
+    m_injector = createMock(Injector.class);
+    m_clusters = createMock(Clusters.class);
+    Cluster cluster = createMock(Cluster.class);
+
+    Map<String, String> mockRangerUsersyncProperties = new HashMap<String, String>() {
+      {
+        put("ranger.usersync.ldap.grouphierarchylevels", "2");
+      }
+    };
+
+    Map<String, String> mockRangerEnvProperties = new HashMap<String, String>();
+
+    Config rangerUsersyncConfig = createMock(Config.class);
+    expect(rangerUsersyncConfig.getType()).andReturn("ranger-ugsync-site").anyTimes();
+    expect(rangerUsersyncConfig.getProperties()).andReturn(mockRangerUsersyncProperties).anyTimes();
+
+    Config rangerEnvConfig = createMock(Config.class);
+    expect(rangerEnvConfig.getType()).andReturn("ranger-env").anyTimes();
+    expect(rangerEnvConfig.getProperties()).andReturn(mockRangerEnvProperties).anyTimes();
+
+    rangerEnvConfig.setProperties(anyObject(Map.class));
+    expectLastCall().atLeastOnce();
+
+    rangerEnvConfig.save();
+    expectLastCall().atLeastOnce();
+
+    expect(cluster.getDesiredConfigByType("ranger-ugsync-site")).andReturn(rangerUsersyncConfig).atLeastOnce();
+    expect(cluster.getDesiredConfigByType("ranger-env")).andReturn(rangerEnvConfig).atLeastOnce();
+    expect(m_clusters.getCluster((String) anyObject())).andReturn(cluster).anyTimes();
+    expect(m_injector.getInstance(Clusters.class)).andReturn(m_clusters).atLeastOnce();
+
+    replay(m_injector, m_clusters, cluster, rangerUsersyncConfig, rangerEnvConfig);
+
+    m_clusterField = RangerUsersyncConfigCalculation.class.getDeclaredField("m_clusters");
+    m_clusterField.setAccessible(true);
+
+  }
+
+  @Test
+  public void testAction() throws Exception {
+
+    Map<String, String> commandParams = new HashMap<String, String>();
+    commandParams.put("clusterName", "cl1");
+
+    ExecutionCommand executionCommand = new ExecutionCommand();
+    executionCommand.setCommandParams(commandParams);
+    executionCommand.setClusterName("cl1");
+
+    HostRoleCommand hrc = createMock(HostRoleCommand.class);
+    expect(hrc.getRequestId()).andReturn(1L).anyTimes();
+    expect(hrc.getStageId()).andReturn(2L).anyTimes();
+    expect(hrc.getExecutionCommandWrapper()).andReturn(new ExecutionCommandWrapper(executionCommand)).anyTimes();
+    replay(hrc);
+
+    RangerUsersyncConfigCalculation action = new RangerUsersyncConfigCalculation();
+    m_clusterField.set(action, m_clusters);
+
+    action.setExecutionCommand(executionCommand);
+    action.setHostRoleCommand(hrc);
+
+    CommandReport report = action.execute(null);
+    Assert.assertNotNull(report);
+
+    Cluster cl = m_clusters.getCluster("cl1");
+    Config config = cl.getDesiredConfigByType("ranger-env");
+
+    Map<String, String> map = config.getProperties();
+
+    Assert.assertTrue(map.containsKey("is_nested_groupsync_enabled"));
+    Assert.assertEquals("true", map.get("is_nested_groupsync_enabled"));
+
+    report = action.execute(null);
+    Assert.assertNotNull(report);
+
+  }
+}
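
The expectations above imply the calculation's contract: a positive ranger.usersync.ldap.grouphierarchylevels value in ranger-ugsync-site should result in is_nested_groupsync_enabled=true being written into ranger-env. A hedged sketch of that rule, derived from the test rather than from the RangerUsersyncConfigCalculation source (variable names are placeholders):

  // Assumed rule only; the real server action also guards against missing or
  // non-numeric values before updating and saving the ranger-env config.
  String levels = rangerUgsyncSiteProperties.getOrDefault("ranger.usersync.ldap.grouphierarchylevels", "0");
  boolean nestedGroupSyncEnabled = Integer.parseInt(levels.trim()) > 0;
  rangerEnvProperties.put("is_nested_groupsync_enabled", String.valueOf(nestedGroupSyncEnabled));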

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/test/java/org/apache/ambari/server/stack/KerberosDescriptorTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/stack/KerberosDescriptorTest.java b/ambari-server/src/test/java/org/apache/ambari/server/stack/KerberosDescriptorTest.java
index 0f1dd55..d208cfc 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/stack/KerberosDescriptorTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/stack/KerberosDescriptorTest.java
@@ -19,147 +19,113 @@
 package org.apache.ambari.server.stack;
 
 import java.io.File;
-import java.io.IOException;
+import java.io.InputStream;
 import java.net.URL;
+import java.util.Set;
+import java.util.regex.Pattern;
 
-import org.apache.ambari.server.state.kerberos.KerberosDescriptor;
-import org.apache.ambari.server.state.kerberos.KerberosDescriptorFactory;
 import org.junit.BeforeClass;
-import org.junit.Ignore;
 import org.junit.Test;
-import org.springframework.util.Assert;
+import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.networknt.schema.JsonSchema;
+import com.networknt.schema.JsonSchemaFactory;
+import com.networknt.schema.ValidationMessage;
+
+import junit.framework.Assert;
 
 /**
  * KerberosDescriptorTest tests the stack- and service-level descriptors for certain stacks
  * and services
  */
-@Ignore
+@Category({category.KerberosTest.class})
 public class KerberosDescriptorTest {
-  private static final KerberosDescriptorFactory KERBEROS_DESCRIPTOR_FACTORY = new KerberosDescriptorFactory();
+  private static Logger LOG = LoggerFactory.getLogger(KerberosDescriptorTest.class);
+
+  private static final Pattern PATTERN_KERBEROS_DESCRIPTOR_FILENAME = Pattern.compile("^kerberos(?:_preconfigure)?\\.json$");
 
   private static File stacksDirectory;
-  private static File hdpStackDirectory;
-  private static File hdp22StackDirectory;
-  private static File hdp22ServicesDirectory;
   private static File commonServicesDirectory;
 
   @BeforeClass
   public static void beforeClass() {
     URL rootDirectoryURL = KerberosDescriptorTest.class.getResource("/");
-    Assert.notNull(rootDirectoryURL);
+    Assert.assertNotNull(rootDirectoryURL);
 
     File resourcesDirectory = new File(new File(rootDirectoryURL.getFile()).getParentFile().getParentFile(), "src/main/resources");
-    Assert.notNull(resourcesDirectory);
-    Assert.isTrue(resourcesDirectory.canRead());
+    Assert.assertNotNull(resourcesDirectory);
+    Assert.assertTrue(resourcesDirectory.canRead());
 
     stacksDirectory = new File(resourcesDirectory, "stacks");
-    Assert.notNull(stacksDirectory);
-    Assert.isTrue(stacksDirectory.canRead());
-
-    hdpStackDirectory = new File(stacksDirectory, "HDP");
-    Assert.notNull(hdpStackDirectory);
-    Assert.isTrue(hdpStackDirectory.canRead());
-
-    hdp22StackDirectory = new File(hdpStackDirectory, "2.2");
-    Assert.notNull(hdp22StackDirectory);
-    Assert.isTrue(hdp22StackDirectory.canRead());
-
-    hdp22ServicesDirectory = new File(hdp22StackDirectory, "services");
-    Assert.notNull(hdp22ServicesDirectory);
-    Assert.isTrue(hdp22ServicesDirectory.canRead());
+    Assert.assertNotNull(stacksDirectory);
+    Assert.assertTrue(stacksDirectory.canRead());
 
     commonServicesDirectory = new File(resourcesDirectory, "common-services");
-    Assert.notNull(commonServicesDirectory);
-    Assert.isTrue(commonServicesDirectory.canRead());
+    Assert.assertNotNull(commonServicesDirectory);
+    Assert.assertTrue(commonServicesDirectory.canRead());
 
   }
 
   @Test
-  public void testCommonHBASEServiceDescriptor() throws IOException {
-    KerberosDescriptor descriptor = getKerberosDescriptor(commonServicesDirectory, "HBASE", "0.96.0.2.0");
-    Assert.notNull(descriptor);
-    Assert.notNull(descriptor.getServices());
-    Assert.notNull(descriptor.getService("HBASE"));
+  public void testCommonServiceDescriptor() throws Exception {
+    JsonSchema schema = getJsonSchemaFromPath("kerberos_descriptor_schema.json");
+    Assert.assertTrue(visitFile(schema, commonServicesDirectory, true));
   }
 
   @Test
-  public void testCommonHDFSServiceDescriptor() throws IOException {
-    KerberosDescriptor descriptor = getKerberosDescriptor(commonServicesDirectory, "HDFS", "2.1.0.2.0");
-    Assert.notNull(descriptor);
-    Assert.notNull(descriptor.getServices());
-    Assert.notNull(descriptor.getService("HDFS"));
+  public void testStackServiceDescriptor() throws Exception {
+    JsonSchema schema = getJsonSchemaFromPath("kerberos_descriptor_schema.json");
+    Assert.assertTrue(visitFile(schema, stacksDirectory, true));
   }
 
-  @Test
-  public void testCommonYarnServiceDescriptor() throws IOException {
-    KerberosDescriptor descriptor = getKerberosDescriptor(commonServicesDirectory, "YARN", "2.1.0.2.0");
-    Assert.notNull(descriptor);
-    Assert.notNull(descriptor.getServices());
-    Assert.notNull(descriptor.getService("YARN"));
-    Assert.notNull(descriptor.getService("MAPREDUCE2"));
-  }
+  private boolean visitFile(JsonSchema schema, File file, boolean previousResult) throws Exception {
 
-  @Test
-  public void testCommonFalconServiceDescriptor() throws IOException {
-    KerberosDescriptor descriptor = getKerberosDescriptor(commonServicesDirectory, "FALCON", "0.5.0.2.1");
-    Assert.notNull(descriptor);
-    Assert.notNull(descriptor.getServices());
-    Assert.notNull(descriptor.getService("FALCON"));
-  }
+    if (file.isDirectory()) {
+      boolean currentResult = true;
 
-  @Test
-  public void testCommonHiveServiceDescriptor() throws IOException {
-    KerberosDescriptor descriptor = getKerberosDescriptor(commonServicesDirectory, "HIVE", "0.12.0.2.0");
-    Assert.notNull(descriptor);
-    Assert.notNull(descriptor.getServices());
-    Assert.notNull(descriptor.getService("HIVE"));
-  }
+      File[] files = file.listFiles();
+      if (files != null) {
+        for (File currentFile : files) {
+          currentResult = visitFile(schema, currentFile, previousResult) && currentResult;
+        }
+      }
+      return previousResult && currentResult;
+    } else if (file.isFile()) {
+      if (PATTERN_KERBEROS_DESCRIPTOR_FILENAME.matcher(file.getName()).matches()) {
+        LOG.info("Validating " + file.getAbsolutePath());
 
-  @Test
-  public void testCommonKnoxServiceDescriptor() throws IOException {
-    KerberosDescriptor descriptor = getKerberosDescriptor(commonServicesDirectory, "KNOX", "0.5.0.2.2");
-    Assert.notNull(descriptor);
-    Assert.notNull(descriptor.getServices());
-    Assert.notNull(descriptor.getService("KNOX"));
-  }
+        JsonNode node = getJsonNodeFromUrl(file.toURI().toURL().toExternalForm());
+        Set<ValidationMessage> errors = schema.validate(node);
 
-  @Test
-  public void testCommonOozieServiceDescriptor() throws IOException {
-    KerberosDescriptor descriptor;
+        if ((errors != null) && !errors.isEmpty()) {
+          for (ValidationMessage message : errors) {
+            LOG.error(message.getMessage());
+          }
 
-    descriptor = getKerberosDescriptor(commonServicesDirectory, "OOZIE", "4.0.0.2.0");
-    Assert.notNull(descriptor);
-    Assert.notNull(descriptor.getServices());
-    Assert.notNull(descriptor.getService("OOZIE"));
-  }
+          return false;
+        }
 
-  @Test
-  public void testCommonStormServiceDescriptor() throws IOException {
-    KerberosDescriptor descriptor = getKerberosDescriptor(commonServicesDirectory, "STORM", "0.9.1");
-    Assert.notNull(descriptor);
-    Assert.notNull(descriptor.getServices());
-    Assert.notNull(descriptor.getService("STORM"));
-  }
+        return true;
+      } else {
+        return true;
+      }
+    }
 
-  @Test
-  public void testCommonZookeepeerServiceDescriptor() throws IOException {
-    KerberosDescriptor descriptor = getKerberosDescriptor(commonServicesDirectory, "ZOOKEEPER", "3.4.5");
-    Assert.notNull(descriptor);
-    Assert.notNull(descriptor.getServices());
-    Assert.notNull(descriptor.getService("ZOOKEEPER"));
+    return previousResult;
   }
 
-  @Test
-  public void testCommonSparkServiceDescriptor() throws IOException {
-    KerberosDescriptor descriptor = getKerberosDescriptor(commonServicesDirectory, "SPARK", "1.2.1");
-    Assert.notNull(descriptor);
-    Assert.notNull(descriptor.getServices());
-    Assert.notNull(descriptor.getService("SPARK"));
+  private JsonNode getJsonNodeFromUrl(String url) throws Exception {
+    ObjectMapper mapper = new ObjectMapper();
+    return mapper.readTree(new URL(url));
   }
 
-  private KerberosDescriptor getKerberosDescriptor(File baseDirectory, String service, String version) throws IOException {
-    File serviceDirectory = new File(baseDirectory, service);
-    File serviceVersionDirectory = new File(serviceDirectory, version);
-    return KERBEROS_DESCRIPTOR_FACTORY.createInstance(new File(serviceVersionDirectory, "kerberos.json"));
+  private JsonSchema getJsonSchemaFromPath(String name) throws Exception {
+    JsonSchemaFactory factory = new JsonSchemaFactory();
+    InputStream is = Thread.currentThread().getContextClassLoader().getResourceAsStream(name);
+    return factory.getSchema(is);
   }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/test/java/org/apache/ambari/server/stack/StackServiceDirectoryTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/stack/StackServiceDirectoryTest.java b/ambari-server/src/test/java/org/apache/ambari/server/stack/StackServiceDirectoryTest.java
new file mode 100644
index 0000000..5983dce
--- /dev/null
+++ b/ambari-server/src/test/java/org/apache/ambari/server/stack/StackServiceDirectoryTest.java
@@ -0,0 +1,76 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.stack;
+
+import static org.junit.Assert.assertEquals;
+
+import java.io.File;
+
+import org.apache.ambari.server.AmbariException;
+
+import org.junit.Test;
+
+/**
+ * Tests for StackServiceDirectory
+ */
+public class StackServiceDirectoryTest {
+
+  private MockStackServiceDirectory createStackServiceDirectory(String servicePath) throws AmbariException {
+    MockStackServiceDirectory ssd = new MockStackServiceDirectory(servicePath);
+    return ssd;
+  }
+
+  @Test
+  public void testValidServiceAdvisorClassName() throws Exception {
+    String pathWithInvalidChars = "/Fake-Stack.Name/1.0/services/FAKESERVICE/";
+    String serviceNameValidChars = "FakeService";
+
+    String pathWithValidChars = "/FakeStackName/1.0/services/FAKESERVICE/";
+    String serviceNameInvalidChars = "Fake-Serv.ice";
+
+    String desiredServiceAdvisorName = "FakeStackName10FakeServiceServiceAdvisor";
+
+    MockStackServiceDirectory ssd1 = createStackServiceDirectory(pathWithInvalidChars);
+    assertEquals(desiredServiceAdvisorName, ssd1.getAdvisorName(serviceNameValidChars));
+
+    MockStackServiceDirectory ssd2 = createStackServiceDirectory(pathWithValidChars);
+    assertEquals(desiredServiceAdvisorName, ssd2.getAdvisorName(serviceNameInvalidChars));
+
+    MockStackServiceDirectory ssd3 = createStackServiceDirectory(pathWithInvalidChars);
+    assertEquals(desiredServiceAdvisorName, ssd3.getAdvisorName(serviceNameInvalidChars));
+
+    MockStackServiceDirectory ssd4 = createStackServiceDirectory(pathWithValidChars);
+    assertEquals(desiredServiceAdvisorName, ssd4.getAdvisorName(serviceNameValidChars));
+  }
+
+  private class MockStackServiceDirectory extends StackServiceDirectory {
+    File advisor = null;
+
+    MockStackServiceDirectory (String servicePath) throws AmbariException {
+      super(servicePath);
+      advisor = new File(servicePath, StackDirectory.SERVICE_ADVISOR_FILE_NAME);
+    }
+
+    protected void parsePath() {}
+
+    public File getAdvisorFile() {
+      return advisor;
+    }
+  }
+}
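
The expected advisor name above implies the sanitization rule being tested: strip non-alphanumeric characters from the stack name, stack version, and service name, then append the ServiceAdvisor suffix. A one-line hypothetical rendering of that rule (not the StackServiceDirectory code; the regex is an assumption):

  // "Fake-Stack.Name" + "1.0" + "Fake-Serv.ice" -> "FakeStackName10FakeServiceServiceAdvisor"
  String advisorName = (stackName + stackVersion + serviceName).replaceAll("[^A-Za-z0-9]", "") + "ServiceAdvisor";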

http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/test/java/org/apache/ambari/server/state/ServiceComponentTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/ServiceComponentTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/ServiceComponentTest.java
index d341a09..0cc43ba 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/ServiceComponentTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/ServiceComponentTest.java
@@ -274,15 +274,28 @@ public class ServiceComponentTest {
     service.addServiceComponent(component);
 
     addHostToCluster("h1", service.getCluster().getClusterName());
+    addHostToCluster("h2", service.getCluster().getClusterName());
+    addHostToCluster("h3", service.getCluster().getClusterName());
     ServiceComponentHost sch =
       serviceComponentHostFactory.createNew(component, "h1");
+    ServiceComponentHost sch2 =
+      serviceComponentHostFactory.createNew(component, "h2");
+    ServiceComponentHost sch3 =
+      serviceComponentHostFactory.createNew(component, "h3");
     sch.setState(State.INSTALLED);
+    sch2.setState(State.INSTALLED);
+    sch3.setState(State.INSTALLED);
 
     Map<String, ServiceComponentHost> compHosts =
       new HashMap<>();
     compHosts.put("h1", sch);
+    compHosts.put("h2", sch2);
+    compHosts.put("h3", sch3);
     component.addServiceComponentHosts(compHosts);
-    Assert.assertEquals(1, component.getServiceComponentHosts().size());
+    Assert.assertEquals(3, component.getServiceComponentHosts().size());
+
+    component.getServiceComponentHost("h2").setMaintenanceState(MaintenanceState.ON);
+    sch3.setMaintenanceState(MaintenanceState.ON);
 
     ServiceComponent sc = service.getServiceComponent(componentName);
     Assert.assertNotNull(sc);
@@ -299,9 +312,11 @@ public class ServiceComponentTest {
     int totalCount = r.getServiceComponentStateCount().get("totalCount");
     int startedCount = r.getServiceComponentStateCount().get("startedCount");
     int installedCount = r.getServiceComponentStateCount().get("installedCount");
-    Assert.assertEquals(1, totalCount);
+    int installedAndMaintenanceOffCount = r.getServiceComponentStateCount().get("installedAndMaintenanceOffCount");
+    Assert.assertEquals(3, totalCount);
     Assert.assertEquals(0, startedCount);
-    Assert.assertEquals(1, installedCount);
+    Assert.assertEquals(3, installedCount);
+    Assert.assertEquals(1, installedAndMaintenanceOffCount);
 
     // TODO check configs
     // r.getConfigVersions()