You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@ambari.apache.org by rl...@apache.org on 2015/05/30 01:48:51 UTC

[1/5] ambari git commit: AMBARI-11396. Kerberos: UI shows Kerberize Cluster step as failed with a retry button, but the backend keeps moving forward to Kerberize the cluster (rlevas)

Repository: ambari
Updated Branches:
  refs/heads/trunk 7e88541ce -> e3acc7f06


http://git-wip-us.apache.org/repos/asf/ambari/blob/e3acc7f0/ambari-server/src/test/java/org/apache/ambari/server/controller/KerberosHelperTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/KerberosHelperTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/KerberosHelperTest.java
index 5a6ddd3..684cdd4 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/KerberosHelperTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/KerberosHelperTest.java
@@ -113,15 +113,19 @@ public class KerberosHelperTest extends EasyMockSupport {
   private final ClusterController clusterController = createStrictMock(ClusterController.class);
   private final KerberosDescriptorFactory kerberosDescriptorFactory = createStrictMock(KerberosDescriptorFactory.class);
   private final KerberosConfigDataFileWriterFactory kerberosConfigDataFileWriterFactory = createStrictMock(KerberosConfigDataFileWriterFactory.class);
-  private final AmbariMetaInfo metaInfo = createNiceMock(AmbariMetaInfo.class);
-  private final TopologyManager topologyManager = createNiceMock(TopologyManager.class);
+  private final AmbariMetaInfo metaInfo = createMock(AmbariMetaInfo.class);
+  private final TopologyManager topologyManager = createMock(TopologyManager.class);
 
   @Before
   public void setUp() throws Exception {
     reset(clusterController);
     reset(metaInfo);
 
-    final KerberosOperationHandlerFactory kerberosOperationHandlerFactory = createNiceMock(KerberosOperationHandlerFactory.class);
+    final KerberosOperationHandlerFactory kerberosOperationHandlerFactory = createMock(KerberosOperationHandlerFactory.class);
+
+    expect(kerberosOperationHandlerFactory.getKerberosOperationHandler(KDCType.NONE))
+        .andReturn(null)
+        .anyTimes();
 
     expect(kerberosOperationHandlerFactory.getKerberosOperationHandler(KDCType.MIT_KDC))
         .andReturn(new KerberosOperationHandler() {
@@ -183,6 +187,7 @@ public class KerberosHelperTest extends EasyMockSupport {
         bind(KerberosDescriptorFactory.class).toInstance(kerberosDescriptorFactory);
         bind(KerberosConfigDataFileWriterFactory.class).toInstance(kerberosConfigDataFileWriterFactory);
         bind(StackManagerFactory.class).toInstance(createNiceMock(StackManagerFactory.class));
+        bind(KerberosHelper.class).to(KerberosHelperImpl.class);
       }
     });
 
@@ -215,11 +220,11 @@ public class KerberosHelperTest extends EasyMockSupport {
   public void testMissingKrb5Conf() throws Exception {
     KerberosHelper kerberosHelper = injector.getInstance(KerberosHelper.class);
 
-    final Map<String, String> kerberosEnvProperties = createNiceMock(Map.class);
+    final Map<String, String> kerberosEnvProperties = createMock(Map.class);
     expect(kerberosEnvProperties.get("ldap_url")).andReturn("").once();
     expect(kerberosEnvProperties.get("container_dn")).andReturn("").once();
 
-    final Config kerberosEnvConfig = createNiceMock(Config.class);
+    final Config kerberosEnvConfig = createMock(Config.class);
     expect(kerberosEnvConfig.getProperties()).andReturn(kerberosEnvProperties).once();
 
     final Cluster cluster = createNiceMock(Cluster.class);
@@ -234,14 +239,14 @@ public class KerberosHelperTest extends EasyMockSupport {
   public void testMissingKerberosEnvConf() throws Exception {
     KerberosHelper kerberosHelper = injector.getInstance(KerberosHelper.class);
 
-    final Map<String, String> kerberosEnvProperties = createNiceMock(Map.class);
+    final Map<String, String> kerberosEnvProperties = createMock(Map.class);
     expect(kerberosEnvProperties.get("realm")).andReturn("EXAMPLE.COM").once();
     expect(kerberosEnvProperties.get("kdc_host")).andReturn("10.0.100.1").once();
 
-    final Map<String, String> krb5ConfProperties = createNiceMock(Map.class);
+    final Map<String, String> krb5ConfProperties = createMock(Map.class);
     expect(krb5ConfProperties.get("kadmin_host")).andReturn("10.0.100.1").once();
 
-    final Config krb5ConfConfig = createNiceMock(Config.class);
+    final Config krb5ConfConfig = createMock(Config.class);
     expect(krb5ConfConfig.getProperties()).andReturn(krb5ConfProperties).once();
 
     final Cluster cluster = createNiceMock(Cluster.class);
@@ -849,27 +854,9 @@ public class KerberosHelperTest extends EasyMockSupport {
                                   boolean getStackDescriptor) throws Exception {
 
     KerberosHelper kerberosHelper = injector.getInstance(KerberosHelper.class);
+    boolean identitiesManaged = (manageIdentities == null) || !"false".equalsIgnoreCase(manageIdentities);
 
-    KerberosConfigDataFileWriter kerberosConfigDataFileWriter = createMock(KerberosConfigDataFileWriter.class);
-    kerberosConfigDataFileWriter.addRecord("cluster-env", "security_enabled", "true", "SET");
-    expectLastCall().times(1);
-    kerberosConfigDataFileWriter.addRecord("service1-site", "component1.kerberos.principal", "component1/_HOST@${realm}", "SET");
-    expectLastCall().times(1);
-    kerberosConfigDataFileWriter.addRecord("service1-site", "component1.keytab.file", "${keytab_dir}/service1.keytab", "SET");
-    expectLastCall().times(1);
-    kerberosConfigDataFileWriter.addRecord("service2-site", "component2.kerberos.principal", "component2/host1@${realm}", "SET");
-    expectLastCall().times(1);
-    kerberosConfigDataFileWriter.addRecord("service2-site", "component2.keytab.file", "${keytab_dir}/service2.keytab", "SET");
-    expectLastCall().times(1);
-    kerberosConfigDataFileWriter.close();
-    expectLastCall().times(1);
-
-    KerberosConfigDataFileWriterFactory factory = injector.getInstance(KerberosConfigDataFileWriterFactory.class);
-    expect(factory.createKerberosConfigDataFileWriter(anyObject(File.class)))
-        .andReturn(kerberosConfigDataFileWriter)
-        .times(1);
-
-    final StackId stackVersion = createNiceMock(StackId.class);
+    final StackId stackVersion = createMock(StackId.class);
 
     final ServiceComponentHost schKerberosClient = createMock(ServiceComponentHost.class);
     expect(schKerberosClient.getServiceName()).andReturn(Service.Type.KERBEROS.name()).anyTimes();
@@ -920,7 +907,7 @@ public class KerberosHelperTest extends EasyMockSupport {
     expect(serviceKerberos.getName()).andReturn(Service.Type.KERBEROS.name()).anyTimes();
     expect(serviceKerberos.getServiceComponents())
         .andReturn(Collections.singletonMap(Role.KERBEROS_CLIENT.name(), serviceComponentKerberosClient))
-            .times(3);
+            .times(1);
     serviceKerberos.setSecurityState(SecurityState.SECURED_KERBEROS);
     expectLastCall().once();
 
@@ -928,7 +915,7 @@ public class KerberosHelperTest extends EasyMockSupport {
     expect(service1.getName()).andReturn("SERVICE1").anyTimes();
     expect(service1.getServiceComponents())
         .andReturn(Collections.<String, ServiceComponent>emptyMap())
-        .times(3);
+        .times(1);
     service1.setSecurityState(SecurityState.SECURED_KERBEROS);
     expectLastCall().once();
 
@@ -936,7 +923,7 @@ public class KerberosHelperTest extends EasyMockSupport {
     expect(service2.getName()).andReturn("SERVICE2").anyTimes();
     expect(service2.getServiceComponents())
         .andReturn(Collections.<String, ServiceComponent>emptyMap())
-        .times(3);
+        .times(1);
     service2.setSecurityState(SecurityState.SECURED_KERBEROS);
     expectLastCall().once();
 
@@ -945,12 +932,12 @@ public class KerberosHelperTest extends EasyMockSupport {
     expect(kerberosEnvProperties.get("manage_identities")).andReturn(manageIdentities).anyTimes();
     expect(kerberosEnvProperties.get("realm")).andReturn("FOOBAR.COM").anyTimes();
 
-    final Config kerberosEnvConfig = createNiceMock(Config.class);
+    final Config kerberosEnvConfig = createMock(Config.class);
     expect(kerberosEnvConfig.getProperties()).andReturn(kerberosEnvProperties).anyTimes();
 
     final Map<String, String> krb5ConfProperties = createMock(Map.class);
 
-    final Config krb5ConfConfig = createNiceMock(Config.class);
+    final Config krb5ConfConfig = createMock(Config.class);
     expect(krb5ConfConfig.getProperties()).andReturn(krb5ConfProperties).anyTimes();
 
     final Cluster cluster = createMock(Cluster.class);
@@ -988,98 +975,51 @@ public class KerberosHelperTest extends EasyMockSupport {
         put("kerberos_admin/" + KerberosCredential.KEY_NAME_KEYTAB, kerberosCredential.getKeytab());
       }
     }}).anyTimes();
+    expect(cluster.getServiceComponentHosts("KERBEROS", "KERBEROS_CLIENT")).andReturn(
+        Arrays.asList(schKerberosClient)
+    ).once();
 
-    final Clusters clusters = injector.getInstance(Clusters.class);
-    expect(clusters.getHostsForCluster("c1"))
-        .andReturn(new HashMap<String, Host>() {
-          {
-            put("host1", host);
-          }
-        })
-        .once();
-    if((manageIdentities == null) || !"false".equalsIgnoreCase(manageIdentities)) {
+    if(identitiesManaged) {
+      final Clusters clusters = injector.getInstance(Clusters.class);
       expect(clusters.getHost("host1"))
           .andReturn(host)
           .once();
     }
 
     final AmbariManagementController ambariManagementController = injector.getInstance(AmbariManagementController.class);
-    expect(ambariManagementController.findConfigurationTagsWithOverrides(cluster, "host1"))
-        .andReturn(Collections.<String, Map<String, String>>emptyMap())
-        .once();
     expect(ambariManagementController.findConfigurationTagsWithOverrides(cluster, null))
         .andReturn(Collections.<String, Map<String, String>>emptyMap())
         .once();
     expect(ambariManagementController.getRoleCommandOrder(cluster))
-        .andReturn(createNiceMock(RoleCommandOrder.class))
+        .andReturn(createMock(RoleCommandOrder.class))
         .once();
 
-    final ConfigHelper configHelper = injector.getInstance(ConfigHelper.class);
-    expect(configHelper.getEffectiveConfigProperties(anyObject(Cluster.class), anyObject(Map.class)))
-        .andReturn(new HashMap<String, Map<String, String>>() {
-          {
-            put("cluster-env", new HashMap<String, String>() {{
-              put("kerberos_domain", "FOOBAR.COM");
-            }});
-          }
-        })
-        .times(2);
+    final KerberosIdentityDescriptor identityDescriptor1 = createMock(KerberosIdentityDescriptor.class);
+
+    final KerberosIdentityDescriptor identityDescriptor2 = createMock(KerberosIdentityDescriptor.class);
 
-    final KerberosPrincipalDescriptor principalDescriptor1 = createNiceMock(KerberosPrincipalDescriptor.class);
-    expect(principalDescriptor1.getValue()).andReturn("component1/_HOST@${realm}").once();
-    expect(principalDescriptor1.getType()).andReturn(KerberosPrincipalType.SERVICE).once();
-    expect(principalDescriptor1.getConfiguration()).andReturn("service1-site/component1.kerberos.principal").once();
-
-    final KerberosPrincipalDescriptor principalDescriptor2 = createNiceMock(KerberosPrincipalDescriptor.class);
-    expect(principalDescriptor2.getValue()).andReturn("component2/${host}@${realm}").once();
-    expect(principalDescriptor2.getType()).andReturn(KerberosPrincipalType.SERVICE).once();
-    expect(principalDescriptor2.getConfiguration()).andReturn("service2-site/component2.kerberos.principal").once();
-
-    final KerberosKeytabDescriptor keytabDescriptor1 = createNiceMock(KerberosKeytabDescriptor.class);
-    expect(keytabDescriptor1.getFile()).andReturn("${keytab_dir}/service1.keytab").once();
-    expect(keytabDescriptor1.getOwnerName()).andReturn("service1").once();
-    expect(keytabDescriptor1.getOwnerAccess()).andReturn("rw").once();
-    expect(keytabDescriptor1.getGroupName()).andReturn("hadoop").once();
-    expect(keytabDescriptor1.getGroupAccess()).andReturn("").once();
-    expect(keytabDescriptor1.getConfiguration()).andReturn("service1-site/component1.keytab.file").once();
-    expect(keytabDescriptor1.isCachable()).andReturn(false).once();
-
-    final KerberosKeytabDescriptor keytabDescriptor2 = createNiceMock(KerberosKeytabDescriptor.class);
-    expect(keytabDescriptor2.getFile()).andReturn("${keytab_dir}/service2.keytab").once();
-    expect(keytabDescriptor2.getOwnerName()).andReturn("service2").once();
-    expect(keytabDescriptor2.getOwnerAccess()).andReturn("rw").once();
-    expect(keytabDescriptor2.getGroupName()).andReturn("hadoop").once();
-    expect(keytabDescriptor2.getGroupAccess()).andReturn("").once();
-    expect(keytabDescriptor2.getConfiguration()).andReturn("service2-site/component2.keytab.file").once();
-    expect(keytabDescriptor2.isCachable()).andReturn(false).once();
-
-    final KerberosIdentityDescriptor identityDescriptor1 = createNiceMock(KerberosIdentityDescriptor.class);
-    expect(identityDescriptor1.getPrincipalDescriptor()).andReturn(principalDescriptor1).once();
-    expect(identityDescriptor1.getKeytabDescriptor()).andReturn(keytabDescriptor1).once();
-
-    final KerberosIdentityDescriptor identityDescriptor2 = createNiceMock(KerberosIdentityDescriptor.class);
-    expect(identityDescriptor2.getPrincipalDescriptor()).andReturn(principalDescriptor2).once();
-    expect(identityDescriptor2.getKeytabDescriptor()).andReturn(keytabDescriptor2).once();
-
-    final KerberosComponentDescriptor componentDescriptor1 = createNiceMock(KerberosComponentDescriptor.class);
-    expect(componentDescriptor1.getIdentities(true)).
+    final KerberosComponentDescriptor componentDescriptor1 = createMock(KerberosComponentDescriptor.class);
+    expect(componentDescriptor1.getIdentities(false)).
         andReturn(new ArrayList<KerberosIdentityDescriptor>() {{
           add(identityDescriptor1);
         }}).once();
 
-    final KerberosComponentDescriptor componentDescriptor2 = createNiceMock(KerberosComponentDescriptor.class);
-    expect(componentDescriptor2.getIdentities(true)).
+    final KerberosComponentDescriptor componentDescriptor2 = createMock(KerberosComponentDescriptor.class);
+    expect(componentDescriptor2.getIdentities(false)).
         andReturn(new ArrayList<KerberosIdentityDescriptor>() {{
           add(identityDescriptor2);
         }}).once();
 
-    final KerberosServiceDescriptor serviceDescriptor1 = createNiceMock(KerberosServiceDescriptor.class);
+    final KerberosServiceDescriptor serviceDescriptor1 = createMock(KerberosServiceDescriptor.class);
     expect(serviceDescriptor1.getComponent("COMPONENT1")).andReturn(componentDescriptor1).once();
+    expect(serviceDescriptor1.getIdentities(false)).andReturn(null).once();
 
-    final KerberosServiceDescriptor serviceDescriptor2 = createNiceMock(KerberosServiceDescriptor.class);
+    final KerberosServiceDescriptor serviceDescriptor2 = createMock(KerberosServiceDescriptor.class);
     expect(serviceDescriptor2.getComponent("COMPONENT2")).andReturn(componentDescriptor2).once();
+    expect(serviceDescriptor2.getIdentities(false)).andReturn(null).once();
 
-    final KerberosDescriptor kerberosDescriptor = createNiceMock(KerberosDescriptor.class);
+    final KerberosDescriptor kerberosDescriptor = createMock(KerberosDescriptor.class);
+    expect(kerberosDescriptor.getService("KERBEROS")).andReturn(null).once();
     expect(kerberosDescriptor.getService("SERVICE1")).andReturn(serviceDescriptor1).once();
     expect(kerberosDescriptor.getService("SERVICE2")).andReturn(serviceDescriptor2).once();
 
@@ -1109,7 +1049,13 @@ public class KerberosHelperTest extends EasyMockSupport {
 
     // This is a STRICT mock to help ensure that the end result is what we want.
     final RequestStageContainer requestStageContainer = createStrictMock(RequestStageContainer.class);
-    if((manageIdentities == null) || !"false".equalsIgnoreCase(manageIdentities)) {
+    // Create Preparation Stage
+    expect(requestStageContainer.getLastStageId()).andReturn(-1L).anyTimes();
+    expect(requestStageContainer.getId()).andReturn(1L).once();
+    requestStageContainer.addStages(anyObject(List.class));
+    expectLastCall().once();
+
+    if(identitiesManaged) {
       // Create Principals Stage
       expect(requestStageContainer.getLastStageId()).andReturn(-1L).anyTimes();
       expect(requestStageContainer.getId()).andReturn(1L).once();
@@ -1154,26 +1100,7 @@ public class KerberosHelperTest extends EasyMockSupport {
 
     KerberosHelper kerberosHelper = injector.getInstance(KerberosHelper.class);
 
-    KerberosConfigDataFileWriter kerberosConfigDataFileWriter = createMock(KerberosConfigDataFileWriter.class);
-    kerberosConfigDataFileWriter.addRecord("cluster-env", "security_enabled", "false", "SET");
-    expectLastCall().times(1);
-    kerberosConfigDataFileWriter.addRecord("service1-site", "component1.kerberos.principal", null, "REMOVE");
-    expectLastCall().times(1);
-    kerberosConfigDataFileWriter.addRecord("service1-site", "component1.keytab.file", null, "REMOVE");
-    expectLastCall().times(1);
-    kerberosConfigDataFileWriter.addRecord("service2-site", "component2.kerberos.principal", null, "REMOVE");
-    expectLastCall().times(1);
-    kerberosConfigDataFileWriter.addRecord("service2-site", "component2.keytab.file", null, "REMOVE");
-    expectLastCall().times(1);
-    kerberosConfigDataFileWriter.close();
-    expectLastCall().times(1);
-
-    KerberosConfigDataFileWriterFactory factory = injector.getInstance(KerberosConfigDataFileWriterFactory.class);
-    expect(factory.createKerberosConfigDataFileWriter(anyObject(File.class)))
-        .andReturn(kerberosConfigDataFileWriter)
-        .times(1);
-
-    final StackId stackVersion = createNiceMock(StackId.class);
+    final StackId stackVersion = createMock(StackId.class);
 
     final ServiceComponentHost schKerberosClient = createMock(ServiceComponentHost.class);
     expect(schKerberosClient.getServiceName()).andReturn(Service.Type.KERBEROS.name()).anyTimes();
@@ -1185,7 +1112,7 @@ public class KerberosHelperTest extends EasyMockSupport {
     expect(schKerberosClient.getState()).andReturn(State.INSTALLED).anyTimes();
 
     final ServiceComponentHost sch1 = createMock(ServiceComponentHost.class);
-    expect(sch1.getServiceName()).andReturn("SERVICE1").times(2);
+    expect(sch1.getServiceName()).andReturn("SERVICE1").times(1);
     expect(sch1.getServiceComponentName()).andReturn("COMPONENT1").once();
     expect(sch1.getSecurityState()).andReturn(SecurityState.SECURED_KERBEROS).anyTimes();
     expect(sch1.getDesiredSecurityState()).andReturn(SecurityState.SECURED_KERBEROS).anyTimes();
@@ -1199,7 +1126,7 @@ public class KerberosHelperTest extends EasyMockSupport {
     expect(expectLastCall()).once();
 
     final ServiceComponentHost sch2 = createMock(ServiceComponentHost.class);
-    expect(sch2.getServiceName()).andReturn("SERVICE2").times(2);
+    expect(sch2.getServiceName()).andReturn("SERVICE2").times(1);
     expect(sch2.getServiceComponentName()).andReturn("COMPONENT2").anyTimes();
     expect(sch2.getSecurityState()).andReturn(SecurityState.SECURED_KERBEROS).anyTimes();
     expect(sch2.getDesiredSecurityState()).andReturn(SecurityState.SECURED_KERBEROS).anyTimes();
@@ -1223,7 +1150,7 @@ public class KerberosHelperTest extends EasyMockSupport {
     expect(serviceKerberos.getName()).andReturn(Service.Type.KERBEROS.name()).anyTimes();
     expect(serviceKerberos.getServiceComponents())
         .andReturn(Collections.singletonMap(Role.KERBEROS_CLIENT.name(), serviceComponentKerberosClient))
-        .times(3);
+        .times(1);
     serviceKerberos.setSecurityState(SecurityState.UNSECURED);
     expectLastCall().once();
 
@@ -1231,7 +1158,7 @@ public class KerberosHelperTest extends EasyMockSupport {
     expect(service1.getName()).andReturn("SERVICE1").anyTimes();
     expect(service1.getServiceComponents())
         .andReturn(Collections.<String, ServiceComponent>emptyMap())
-        .times(3);
+        .times(1);
     service1.setSecurityState(SecurityState.UNSECURED);
     expectLastCall().once();
 
@@ -1239,20 +1166,20 @@ public class KerberosHelperTest extends EasyMockSupport {
     expect(service2.getName()).andReturn("SERVICE2").anyTimes();
     expect(service2.getServiceComponents())
         .andReturn(Collections.<String, ServiceComponent>emptyMap())
-        .times(3);
+        .times(1);
     service2.setSecurityState(SecurityState.UNSECURED);
     expectLastCall().once();
 
-    final Map<String, String> kerberosEnvProperties = createNiceMock(Map.class);
+    final Map<String, String> kerberosEnvProperties = createMock(Map.class);
     expect(kerberosEnvProperties.get("kdc_type")).andReturn("mit-kdc").anyTimes();
     expect(kerberosEnvProperties.get("realm")).andReturn("FOOBAR.COM").anyTimes();
 
-    final Config kerberosEnvConfig = createNiceMock(Config.class);
+    final Config kerberosEnvConfig = createMock(Config.class);
     expect(kerberosEnvConfig.getProperties()).andReturn(kerberosEnvProperties).anyTimes();
 
-    final Map<String, String> krb5ConfProperties = createNiceMock(Map.class);
+    final Map<String, String> krb5ConfProperties = createMock(Map.class);
 
-    final Config krb5ConfConfig = createNiceMock(Config.class);
+    final Config krb5ConfConfig = createMock(Config.class);
     expect(krb5ConfConfig.getProperties()).andReturn(krb5ConfProperties).anyTimes();
 
     final Cluster cluster = createNiceMock(Cluster.class);
@@ -1290,96 +1217,40 @@ public class KerberosHelperTest extends EasyMockSupport {
       }
     }}).anyTimes();
 
-    final Clusters clusters = injector.getInstance(Clusters.class);
-    expect(clusters.getHostsForCluster("c1"))
-        .andReturn(new HashMap<String, Host>() {
-          {
-            put("host1", host);
-          }
-        })
-        .once();
-    expect(clusters.getHost("host1"))
-        .andReturn(host)
-        .once();
-
-
     final AmbariManagementController ambariManagementController = injector.getInstance(AmbariManagementController.class);
-    expect(ambariManagementController.findConfigurationTagsWithOverrides(cluster, "host1"))
-        .andReturn(Collections.<String, Map<String, String>>emptyMap())
-        .once();
     expect(ambariManagementController.findConfigurationTagsWithOverrides(cluster, null))
         .andReturn(Collections.<String, Map<String, String>>emptyMap())
         .once();
     expect(ambariManagementController.getRoleCommandOrder(cluster))
-        .andReturn(createNiceMock(RoleCommandOrder.class))
+        .andReturn(createMock(RoleCommandOrder.class))
         .once();
 
-    final ConfigHelper configHelper = injector.getInstance(ConfigHelper.class);
-    expect(configHelper.getEffectiveConfigProperties(anyObject(Cluster.class), anyObject(Map.class)))
-        .andReturn(new HashMap<String, Map<String, String>>() {
-          {
-            put("cluster-env", new HashMap<String, String>() {{
-              put("kerberos_domain", "FOOBAR.COM");
-            }});
-          }
-        })
-        .times(2);
+    final KerberosIdentityDescriptor identityDescriptor1 = createMock(KerberosIdentityDescriptor.class);
+
+    final KerberosIdentityDescriptor identityDescriptor2 = createMock(KerberosIdentityDescriptor.class);
 
-    final KerberosPrincipalDescriptor principalDescriptor1 = createNiceMock(KerberosPrincipalDescriptor.class);
-    expect(principalDescriptor1.getValue()).andReturn("component1/_HOST@${realm}").once();
-    expect(principalDescriptor1.getType()).andReturn(KerberosPrincipalType.SERVICE).once();
-    expect(principalDescriptor1.getConfiguration()).andReturn("service1-site/component1.kerberos.principal").once();
-
-    final KerberosPrincipalDescriptor principalDescriptor2 = createNiceMock(KerberosPrincipalDescriptor.class);
-    expect(principalDescriptor2.getValue()).andReturn("component2/${host}@${realm}").once();
-    expect(principalDescriptor2.getType()).andReturn(KerberosPrincipalType.SERVICE).once();
-    expect(principalDescriptor2.getConfiguration()).andReturn("service2-site/component2.kerberos.principal").once();
-
-    final KerberosKeytabDescriptor keytabDescriptor1 = createNiceMock(KerberosKeytabDescriptor.class);
-    expect(keytabDescriptor1.getFile()).andReturn("${keytab_dir}/service1.keytab").once();
-    expect(keytabDescriptor1.getOwnerName()).andReturn("service1").once();
-    expect(keytabDescriptor1.getOwnerAccess()).andReturn("rw").once();
-    expect(keytabDescriptor1.getGroupName()).andReturn("hadoop").once();
-    expect(keytabDescriptor1.getGroupAccess()).andReturn("").once();
-    expect(keytabDescriptor1.getConfiguration()).andReturn("service1-site/component1.keytab.file").once();
-    expect(keytabDescriptor1.isCachable()).andReturn(false).once();
-
-    final KerberosKeytabDescriptor keytabDescriptor2 = createNiceMock(KerberosKeytabDescriptor.class);
-    expect(keytabDescriptor2.getFile()).andReturn("${keytab_dir}/service2.keytab").once();
-    expect(keytabDescriptor2.getOwnerName()).andReturn("service2").once();
-    expect(keytabDescriptor2.getOwnerAccess()).andReturn("rw").once();
-    expect(keytabDescriptor2.getGroupName()).andReturn("hadoop").once();
-    expect(keytabDescriptor2.getGroupAccess()).andReturn("").once();
-    expect(keytabDescriptor2.getConfiguration()).andReturn("service2-site/component2.keytab.file").once();
-    expect(keytabDescriptor2.isCachable()).andReturn(false).once();
-
-    final KerberosIdentityDescriptor identityDescriptor1 = createNiceMock(KerberosIdentityDescriptor.class);
-    expect(identityDescriptor1.getPrincipalDescriptor()).andReturn(principalDescriptor1).once();
-    expect(identityDescriptor1.getKeytabDescriptor()).andReturn(keytabDescriptor1).once();
-
-    final KerberosIdentityDescriptor identityDescriptor2 = createNiceMock(KerberosIdentityDescriptor.class);
-    expect(identityDescriptor2.getPrincipalDescriptor()).andReturn(principalDescriptor2).once();
-    expect(identityDescriptor2.getKeytabDescriptor()).andReturn(keytabDescriptor2).once();
-
-    final KerberosComponentDescriptor componentDescriptor1 = createNiceMock(KerberosComponentDescriptor.class);
-    expect(componentDescriptor1.getIdentities(true)).
+    final KerberosComponentDescriptor componentDescriptor1 = createMock(KerberosComponentDescriptor.class);
+    expect(componentDescriptor1.getIdentities(false)).
         andReturn(new ArrayList<KerberosIdentityDescriptor>() {{
           add(identityDescriptor1);
         }}).once();
 
-    final KerberosComponentDescriptor componentDescriptor2 = createNiceMock(KerberosComponentDescriptor.class);
-    expect(componentDescriptor2.getIdentities(true)).
+    final KerberosComponentDescriptor componentDescriptor2 = createMock(KerberosComponentDescriptor.class);
+    expect(componentDescriptor2.getIdentities(false)).
         andReturn(new ArrayList<KerberosIdentityDescriptor>() {{
           add(identityDescriptor2);
         }}).once();
 
-    final KerberosServiceDescriptor serviceDescriptor1 = createNiceMock(KerberosServiceDescriptor.class);
+    final KerberosServiceDescriptor serviceDescriptor1 = createMock(KerberosServiceDescriptor.class);
     expect(serviceDescriptor1.getComponent("COMPONENT1")).andReturn(componentDescriptor1).once();
+    expect(serviceDescriptor1.getIdentities(false)).andReturn(null).times(1);
 
-    final KerberosServiceDescriptor serviceDescriptor2 = createNiceMock(KerberosServiceDescriptor.class);
+    final KerberosServiceDescriptor serviceDescriptor2 = createMock(KerberosServiceDescriptor.class);
     expect(serviceDescriptor2.getComponent("COMPONENT2")).andReturn(componentDescriptor2).once();
+    expect(serviceDescriptor2.getIdentities(false)).andReturn(null).times(1);
 
-    final KerberosDescriptor kerberosDescriptor = createNiceMock(KerberosDescriptor.class);
+    final KerberosDescriptor kerberosDescriptor = createMock(KerberosDescriptor.class);
+    expect(kerberosDescriptor.getService("KERBEROS")).andReturn(null).once();
     expect(kerberosDescriptor.getService("SERVICE1")).andReturn(serviceDescriptor1).once();
     expect(kerberosDescriptor.getService("SERVICE2")).andReturn(serviceDescriptor2).once();
 
@@ -1410,6 +1281,11 @@ public class KerberosHelperTest extends EasyMockSupport {
 
     // This is a STRICT mock to help ensure that the end result is what we want.
     final RequestStageContainer requestStageContainer = createStrictMock(RequestStageContainer.class);
+    // Preparation Stage
+    expect(requestStageContainer.getLastStageId()).andReturn(2L).anyTimes();
+    expect(requestStageContainer.getId()).andReturn(1L).once();
+    requestStageContainer.addStages(anyObject(List.class));
+    expectLastCall().once();
     // Update Configs Stage
     expect(requestStageContainer.getLastStageId()).andReturn(2L).anyTimes();
     expect(requestStageContainer.getId()).andReturn(1L).once();
@@ -1451,7 +1327,7 @@ public class KerberosHelperTest extends EasyMockSupport {
 
     KerberosHelper kerberosHelper = injector.getInstance(KerberosHelper.class);
 
-    final StackId stackVersion = createNiceMock(StackId.class);
+    final StackId stackVersion = createMock(StackId.class);
 
     final ServiceComponentHost schKerberosClient = createMock(ServiceComponentHost.class);
     expect(schKerberosClient.getServiceName()).andReturn(Service.Type.KERBEROS.name()).anyTimes();
@@ -1503,7 +1379,6 @@ public class KerberosHelperTest extends EasyMockSupport {
     }
     else {
       schKerberosClientInvalid = null;
-      sch1a = null;
       hostInvalid = null;
     }
 
@@ -1522,30 +1397,30 @@ public class KerberosHelperTest extends EasyMockSupport {
     expect(serviceKerberos.getName()).andReturn(Service.Type.KERBEROS.name()).anyTimes();
     expect(serviceKerberos.getServiceComponents())
         .andReturn(Collections.singletonMap(Role.KERBEROS_CLIENT.name(), serviceComponentKerberosClient))
-        .times(3);
+        .times(1);
 
     final Service service1 = createStrictMock(Service.class);
     expect(service1.getName()).andReturn("SERVICE1").anyTimes();
     expect(service1.getServiceComponents())
         .andReturn(Collections.<String, ServiceComponent>emptyMap())
-        .times(3);
+        .times(1);
 
     final Service service2 = createStrictMock(Service.class);
     expect(service2.getName()).andReturn("SERVICE2").anyTimes();
     expect(service2.getServiceComponents())
         .andReturn(Collections.<String, ServiceComponent>emptyMap())
-        .times(3);
+        .times(1);
 
-    final Map<String, String> kerberosEnvProperties = createNiceMock(Map.class);
+    final Map<String, String> kerberosEnvProperties = createMock(Map.class);
     expect(kerberosEnvProperties.get("kdc_type")).andReturn("mit-kdc").anyTimes();
     expect(kerberosEnvProperties.get("realm")).andReturn("FOOBAR.COM").anyTimes();
 
-    final Config kerberosEnvConfig = createNiceMock(Config.class);
+    final Config kerberosEnvConfig = createMock(Config.class);
     expect(kerberosEnvConfig.getProperties()).andReturn(kerberosEnvProperties).anyTimes();
 
-    final Map<String, String> krb5ConfProperties = createNiceMock(Map.class);
+    final Map<String, String> krb5ConfProperties = createMock(Map.class);
 
-    final Config krb5ConfConfig = createNiceMock(Config.class);
+    final Config krb5ConfConfig = createMock(Config.class);
     expect(krb5ConfConfig.getProperties()).andReturn(krb5ConfProperties).anyTimes();
 
     final Cluster cluster = createNiceMock(Cluster.class);
@@ -1587,99 +1462,40 @@ public class KerberosHelperTest extends EasyMockSupport {
       }
     }}).anyTimes();
 
-    final Clusters clusters = injector.getInstance(Clusters.class);
-    expect(clusters.getHostsForCluster("c1"))
-        .andReturn(new HashMap<String, Host>() {
-          {
-            put("host1", host);
-
-            if (testInvalidHost) {
-              put("host2", hostInvalid);
-            }
-          }
-        })
-        .once();
-    expect(clusters.getHost("host1"))
-        .andReturn(host)
-        .once();
-
     final AmbariManagementController ambariManagementController = injector.getInstance(AmbariManagementController.class);
-    expect(ambariManagementController.findConfigurationTagsWithOverrides(cluster, "host1"))
-        .andReturn(Collections.<String, Map<String, String>>emptyMap())
-        .once();
     expect(ambariManagementController.findConfigurationTagsWithOverrides(cluster, null))
         .andReturn(Collections.<String, Map<String, String>>emptyMap())
         .once();
     expect(ambariManagementController.getRoleCommandOrder(cluster))
-        .andReturn(createNiceMock(RoleCommandOrder.class))
+        .andReturn(createMock(RoleCommandOrder.class))
         .once();
 
-    final ConfigHelper configHelper = injector.getInstance(ConfigHelper.class);
-    expect(configHelper.getEffectiveConfigProperties(anyObject(Cluster.class), anyObject(Map.class)))
-        .andReturn(new HashMap<String, Map<String, String>>() {
-          {
-            put("cluster-env", new HashMap<String, String>() {{
-              put("kerberos_domain", "FOOBAR.COM");
-            }});
-          }
-        })
-        .times(2);
+    final KerberosIdentityDescriptor identityDescriptor1 = createMock(KerberosIdentityDescriptor.class);
 
-    final KerberosPrincipalDescriptor principalDescriptor1 = createNiceMock(KerberosPrincipalDescriptor.class);
-    expect(principalDescriptor1.getValue()).andReturn("component1/_HOST@${realm}").once();
-    expect(principalDescriptor1.getType()).andReturn(KerberosPrincipalType.SERVICE).once();
-    expect(principalDescriptor1.getConfiguration()).andReturn("service1-site/component1.kerberos.principal").once();
-
-    final KerberosPrincipalDescriptor principalDescriptor2 = createNiceMock(KerberosPrincipalDescriptor.class);
-    expect(principalDescriptor2.getValue()).andReturn("component2/${host}@${realm}").once();
-    expect(principalDescriptor2.getType()).andReturn(KerberosPrincipalType.SERVICE).once();
-    expect(principalDescriptor2.getConfiguration()).andReturn("service2-site/component2.kerberos.principal").once();
-
-    final KerberosKeytabDescriptor keytabDescriptor1 = createNiceMock(KerberosKeytabDescriptor.class);
-    expect(keytabDescriptor1.getFile()).andReturn("${keytab_dir}/service1.keytab").once();
-    expect(keytabDescriptor1.getOwnerName()).andReturn("service1").once();
-    expect(keytabDescriptor1.getOwnerAccess()).andReturn("rw").once();
-    expect(keytabDescriptor1.getGroupName()).andReturn("hadoop").once();
-    expect(keytabDescriptor1.getGroupAccess()).andReturn("").once();
-    expect(keytabDescriptor1.getConfiguration()).andReturn("service1-site/component1.keytab.file").once();
-    expect(keytabDescriptor1.isCachable()).andReturn(false).once();
-
-    final KerberosKeytabDescriptor keytabDescriptor2 = createNiceMock(KerberosKeytabDescriptor.class);
-    expect(keytabDescriptor2.getFile()).andReturn("${keytab_dir}/service2.keytab").once();
-    expect(keytabDescriptor2.getOwnerName()).andReturn("service2").once();
-    expect(keytabDescriptor2.getOwnerAccess()).andReturn("rw").once();
-    expect(keytabDescriptor2.getGroupName()).andReturn("hadoop").once();
-    expect(keytabDescriptor2.getGroupAccess()).andReturn("").once();
-    expect(keytabDescriptor2.getConfiguration()).andReturn("service2-site/component2.keytab.file").once();
-    expect(keytabDescriptor2.isCachable()).andReturn(false).once();
-
-    final KerberosIdentityDescriptor identityDescriptor1 = createNiceMock(KerberosIdentityDescriptor.class);
-    expect(identityDescriptor1.getPrincipalDescriptor()).andReturn(principalDescriptor1).once();
-    expect(identityDescriptor1.getKeytabDescriptor()).andReturn(keytabDescriptor1).once();
-
-    final KerberosIdentityDescriptor identityDescriptor2 = createNiceMock(KerberosIdentityDescriptor.class);
-    expect(identityDescriptor2.getPrincipalDescriptor()).andReturn(principalDescriptor2).once();
-    expect(identityDescriptor2.getKeytabDescriptor()).andReturn(keytabDescriptor2).once();
-
-    final KerberosComponentDescriptor componentDescriptor1 = createNiceMock(KerberosComponentDescriptor.class);
-    expect(componentDescriptor1.getIdentities(true)).
+    final KerberosIdentityDescriptor identityDescriptor2 = createMock(KerberosIdentityDescriptor.class);
+
+    final KerberosComponentDescriptor componentDescriptor1 = createMock(KerberosComponentDescriptor.class);
+    expect(componentDescriptor1.getIdentities(false)).
         andReturn(new ArrayList<KerberosIdentityDescriptor>() {{
           add(identityDescriptor1);
         }}).once();
 
-    final KerberosComponentDescriptor componentDescriptor2 = createNiceMock(KerberosComponentDescriptor.class);
-    expect(componentDescriptor2.getIdentities(true)).
+    final KerberosComponentDescriptor componentDescriptor2 = createMock(KerberosComponentDescriptor.class);
+    expect(componentDescriptor2.getIdentities(false)).
         andReturn(new ArrayList<KerberosIdentityDescriptor>() {{
           add(identityDescriptor2);
         }}).once();
 
-    final KerberosServiceDescriptor serviceDescriptor1 = createNiceMock(KerberosServiceDescriptor.class);
+    final KerberosServiceDescriptor serviceDescriptor1 = createMock(KerberosServiceDescriptor.class);
     expect(serviceDescriptor1.getComponent("COMPONENT1")).andReturn(componentDescriptor1).once();
+    expect(serviceDescriptor1.getIdentities(false)).andReturn(null).once();
 
-    final KerberosServiceDescriptor serviceDescriptor2 = createNiceMock(KerberosServiceDescriptor.class);
+    final KerberosServiceDescriptor serviceDescriptor2 = createMock(KerberosServiceDescriptor.class);
     expect(serviceDescriptor2.getComponent("COMPONENT2")).andReturn(componentDescriptor2).once();
+    expect(serviceDescriptor2.getIdentities(false)).andReturn(null).once();
 
-    final KerberosDescriptor kerberosDescriptor = createNiceMock(KerberosDescriptor.class);
+    final KerberosDescriptor kerberosDescriptor = createMock(KerberosDescriptor.class);
+    expect(kerberosDescriptor.getService("KERBEROS")).andReturn(null).once();
     expect(kerberosDescriptor.getService("SERVICE1")).andReturn(serviceDescriptor1).once();
     expect(kerberosDescriptor.getService("SERVICE2")).andReturn(serviceDescriptor2).once();
 
@@ -1708,6 +1524,11 @@ public class KerberosHelperTest extends EasyMockSupport {
     if (mockRequestStageContainer) {
       // This is a STRICT mock to help ensure that the end result is what we want.
       requestStageContainer = createStrictMock(RequestStageContainer.class);
+      // Create Preparation Stage
+      expect(requestStageContainer.getLastStageId()).andReturn(-1L).anyTimes();
+      expect(requestStageContainer.getId()).andReturn(1L).once();
+      requestStageContainer.addStages(anyObject(List.class));
+      expectLastCall().once();
       // Create Principals Stage
       expect(requestStageContainer.getLastStageId()).andReturn(-1L).anyTimes();
       expect(requestStageContainer.getId()).andReturn(1L).once();
@@ -1871,6 +1692,8 @@ public class KerberosHelperTest extends EasyMockSupport {
 
     expect(kerberosDescriptorFactory.createInstance(anyObject(Map.class)))
         .andReturn(kerberosDescriptor).once();
+
+    expect(metaInfo.getKerberosDescriptor("HDP", "2.2")).andReturn(null).once();
   }
 
   private void setupGetDescriptorFromStack(KerberosDescriptor kerberosDescriptor) throws Exception {
@@ -1918,11 +1741,12 @@ public class KerberosHelperTest extends EasyMockSupport {
     expect(sch1C.getServiceComponentName()).andReturn("COMPONENT1").anyTimes();
     expect(sch1C.getHostName()).andReturn("hostC").anyTimes();
 
-    final ServiceComponentHost sch2 = createStrictMock(ServiceComponentHost.class);
+    final ServiceComponentHost sch2 = createMock(ServiceComponentHost.class);
     expect(sch2.getServiceName()).andReturn("SERVICE2").anyTimes();
     expect(sch2.getServiceComponentName()).andReturn("COMPONENT3").anyTimes();
+    expect(sch2.getHostName()).andReturn("hostA").anyTimes();
 
-    final ServiceComponentHost sch3 = createStrictMock(ServiceComponentHost.class);
+    final ServiceComponentHost sch3 = createMock(ServiceComponentHost.class);
     expect(sch3.getServiceName()).andReturn("SERVICE3").anyTimes();
     expect(sch3.getServiceComponentName()).andReturn("COMPONENT3").anyTimes();
     expect(sch3.getHostName()).andReturn("hostA").anyTimes();
@@ -1947,30 +1771,30 @@ public class KerberosHelperTest extends EasyMockSupport {
     expect(serviceKerberos.getName()).andReturn(Service.Type.KERBEROS.name()).anyTimes();
     expect(serviceKerberos.getServiceComponents())
         .andReturn(Collections.singletonMap(Role.KERBEROS_CLIENT.name(), serviceComponentKerberosClient))
-        .times(5);
+        .times(1);
 
     final Service service1 = createStrictMock(Service.class);
     expect(service1.getName()).andReturn("SERVICE1").anyTimes();
     expect(service1.getServiceComponents())
         .andReturn(Collections.<String, ServiceComponent>emptyMap())
-        .times(5);
+        .times(1);
 
     final Service service2 = createStrictMock(Service.class);
     expect(service2.getName()).andReturn("SERVICE2").anyTimes();
     expect(service2.getServiceComponents())
         .andReturn(Collections.<String, ServiceComponent>emptyMap())
-        .times(5);
+        .times(1);
 
-    final Map<String, String> kerberosEnvProperties = createNiceMock(Map.class);
+    final Map<String, String> kerberosEnvProperties = createMock(Map.class);
     expect(kerberosEnvProperties.get("kdc_type")).andReturn("mit-kdc").anyTimes();
     expect(kerberosEnvProperties.get("realm")).andReturn("FOOBAR.COM").anyTimes();
 
-    final Config kerberosEnvConfig = createNiceMock(Config.class);
+    final Config kerberosEnvConfig = createMock(Config.class);
     expect(kerberosEnvConfig.getProperties()).andReturn(kerberosEnvProperties).anyTimes();
 
-    final Map<String, String> krb5ConfProperties = createNiceMock(Map.class);
+    final Map<String, String> krb5ConfProperties = createMock(Map.class);
 
-    final Config krb5ConfConfig = createNiceMock(Config.class);
+    final Config krb5ConfConfig = createMock(Config.class);
     expect(krb5ConfConfig.getProperties()).andReturn(krb5ConfProperties).anyTimes();
 
     final Cluster cluster = createNiceMock(Cluster.class);
@@ -2023,51 +1847,14 @@ public class KerberosHelperTest extends EasyMockSupport {
       }
     }}).anyTimes();
 
-    final Clusters clusters = injector.getInstance(Clusters.class);
-    expect(clusters.getHostsForCluster("c1"))
-        .andReturn(new HashMap<String, Host>() {
-          {
-            put("hostA", hostA);
-            put("hostB", hostB);
-            put("hostC", hostC);
-          }
-        })
-        .once();
-    expect(clusters.getHost("hostA"))
-        .andReturn(hostA)
-        .times(1);
-    expect(clusters.getHost("hostB"))
-        .andReturn(hostB)
-        .times(1);
-
     final AmbariManagementController ambariManagementController = injector.getInstance(AmbariManagementController.class);
-    expect(ambariManagementController.findConfigurationTagsWithOverrides(cluster, "hostA"))
-        .andReturn(Collections.<String, Map<String, String>>emptyMap())
-        .once();
-    expect(ambariManagementController.findConfigurationTagsWithOverrides(cluster, "hostB"))
-        .andReturn(Collections.<String, Map<String, String>>emptyMap())
-        .once();
-    expect(ambariManagementController.findConfigurationTagsWithOverrides(cluster, "hostC"))
-        .andReturn(Collections.<String, Map<String, String>>emptyMap())
-        .once();
     expect(ambariManagementController.findConfigurationTagsWithOverrides(cluster, null))
         .andReturn(Collections.<String, Map<String, String>>emptyMap())
         .times(3);
     expect(ambariManagementController.getRoleCommandOrder(cluster))
-        .andReturn(createNiceMock(RoleCommandOrder.class))
+        .andReturn(createMock(RoleCommandOrder.class))
         .once();
 
-    final ConfigHelper configHelper = injector.getInstance(ConfigHelper.class);
-    expect(configHelper.getEffectiveConfigProperties(anyObject(Cluster.class), anyObject(Map.class)))
-        .andReturn(new HashMap<String, Map<String, String>>() {
-          {
-            put("cluster-env", new HashMap<String, String>() {{
-              put("kerberos_domain", "FOOBAR.COM");
-            }});
-          }
-        })
-        .times(4);
-
     final KerberosPrincipalDescriptor principalDescriptor1a = createMock(KerberosPrincipalDescriptor.class);
     expect(principalDescriptor1a.getValue()).andReturn("component1a/_HOST@${realm}").anyTimes();
     expect(principalDescriptor1a.getType()).andReturn(KerberosPrincipalType.SERVICE).anyTimes();
@@ -2087,22 +1874,8 @@ public class KerberosHelperTest extends EasyMockSupport {
     expect(principalDescriptor3.getConfiguration()).andReturn("service3-site/component3.kerberos.principal").anyTimes();
 
     final KerberosKeytabDescriptor keytabDescriptor1 = createMock(KerberosKeytabDescriptor.class);
-    expect(keytabDescriptor1.getFile()).andReturn("${keytab_dir}/service1.keytab").times(3);
-    expect(keytabDescriptor1.getOwnerName()).andReturn("service1").times(3);
-    expect(keytabDescriptor1.getOwnerAccess()).andReturn("rw").times(3);
-    expect(keytabDescriptor1.getGroupName()).andReturn("hadoop").times(3);
-    expect(keytabDescriptor1.getGroupAccess()).andReturn("").times(3);
-    expect(keytabDescriptor1.getConfiguration()).andReturn("service1-site/component1.keytab.file").times(3);
-    expect(keytabDescriptor1.isCachable()).andReturn(false).times(3);
 
     final KerberosKeytabDescriptor keytabDescriptor3 = createMock(KerberosKeytabDescriptor.class);
-    expect(keytabDescriptor3.getFile()).andReturn("${keytab_dir}/service3.keytab").once();
-    expect(keytabDescriptor3.getOwnerName()).andReturn("service3").once();
-    expect(keytabDescriptor3.getOwnerAccess()).andReturn("rw").once();
-    expect(keytabDescriptor3.getGroupName()).andReturn("hadoop").once();
-    expect(keytabDescriptor3.getGroupAccess()).andReturn("").once();
-    expect(keytabDescriptor3.getConfiguration()).andReturn("service3-site/component3.keytab.file").once();
-    expect(keytabDescriptor3.isCachable()).andReturn(false).times(1);
 
     final KerberosIdentityDescriptor identityDescriptor1a = createMock(KerberosIdentityDescriptor.class);
     expect(identityDescriptor1a.getName()).andReturn("identity1a").anyTimes();
@@ -2123,48 +1896,31 @@ public class KerberosHelperTest extends EasyMockSupport {
       add(identityDescriptor1b);
     }};
     final KerberosComponentDescriptor componentDescriptor1 = createStrictMock(KerberosComponentDescriptor.class);
-    expect(componentDescriptor1.getIdentities(true)).andReturn(identityDescriptors1).times(1);
-    expect(componentDescriptor1.getConfigurations(true)).andReturn(null).times(1);
-    expect(componentDescriptor1.getIdentities(true)).andReturn(identityDescriptors1).times(1);
-    expect(componentDescriptor1.getConfigurations(true)).andReturn(null).times(1);
-    expect(componentDescriptor1.getIdentities(true)).andReturn(identityDescriptors1).times(1);
-    expect(componentDescriptor1.getConfigurations(true)).andReturn(null).times(1);
-    expect(componentDescriptor1.getIdentities(true)).andReturn(identityDescriptors1).times(1);
-    expect(componentDescriptor1.getAuthToLocalProperties()).andReturn(null).times(1);
+    expect(componentDescriptor1.getIdentities(false)).andReturn(identityDescriptors1).times(3);
 
     final ArrayList<KerberosIdentityDescriptor> identityDescriptors3 = new ArrayList<KerberosIdentityDescriptor>() {{
       add(identityDescriptor3);
     }};
     final KerberosComponentDescriptor componentDescriptor3 = createStrictMock(KerberosComponentDescriptor.class);
-    expect(componentDescriptor3.getIdentities(true)).andReturn(identityDescriptors3).times(1);
-    expect(componentDescriptor3.getConfigurations(true)).andReturn(null).times(1);
+    expect(componentDescriptor3.getIdentities(false)).andReturn(identityDescriptors3).times(1);
 
     final KerberosServiceDescriptor serviceDescriptor1 = createMock(KerberosServiceDescriptor.class);
-    expect(serviceDescriptor1.getIdentities(true)).andReturn(null).times(4);
-    expect(serviceDescriptor1.getName()).andReturn("SERVICE1").times(1);
-    expect(serviceDescriptor1.getComponents()).andReturn(new HashMap<String, KerberosComponentDescriptor>(){{
-      put("COMPONENT1", componentDescriptor1);
-    }}).times(1);
-    expect(serviceDescriptor1.getComponent("COMPONENT1")).andReturn(componentDescriptor1).times(3);
-    expect(serviceDescriptor1.getAuthToLocalProperties()).andReturn(null).once();
+    expect(serviceDescriptor1.getIdentities(false)).andReturn(null).times(1);
+    expect(serviceDescriptor1.getComponent("COMPONENT1")).andReturn(componentDescriptor1).times(1);
+    expect(serviceDescriptor1.getIdentities(false)).andReturn(null).times(1);
+    expect(serviceDescriptor1.getComponent("COMPONENT1")).andReturn(componentDescriptor1).times(1);
+    expect(serviceDescriptor1.getIdentities(false)).andReturn(null).times(1);
+    expect(serviceDescriptor1.getComponent("COMPONENT1")).andReturn(componentDescriptor1).times(1);
 
     final KerberosServiceDescriptor serviceDescriptor3 = createMock(KerberosServiceDescriptor.class);
-    expect(serviceDescriptor3.getIdentities(true)).andReturn(null).times(1);
-    expect(serviceDescriptor3.getName()).andReturn("SERVICE3").times(1);
-    expect(serviceDescriptor3.getComponent("COMPONENT3")).andReturn(componentDescriptor3).once();
+    expect(serviceDescriptor3.getIdentities(false)).andReturn(null).times(1);
+    expect(serviceDescriptor3.getComponent("COMPONENT3")).andReturn(componentDescriptor3).times(1);
 
     final KerberosDescriptor kerberosDescriptor = createStrictMock(KerberosDescriptor.class);
-    expect(kerberosDescriptor.getProperties()).andReturn(null).once();
-    expect(kerberosDescriptor.getService("SERVICE1")).andReturn(serviceDescriptor1).times(3);
+    expect(kerberosDescriptor.getService("SERVICE1")).andReturn(serviceDescriptor1).times(1);
     expect(kerberosDescriptor.getService("SERVICE3")).andReturn(serviceDescriptor3).times(1);
-    expect(kerberosDescriptor.getIdentities()).andReturn(null).once();
-    expect(kerberosDescriptor.getAuthToLocalProperties()).andReturn(null).once();
-
-    expect(kerberosDescriptor.getServices()).andReturn(new HashMap<String, KerberosServiceDescriptor>()
-    {{
-      put("SERVICE1", serviceDescriptor1);
-      put("SERVICE3", serviceDescriptor3);
-    }}).once();
+    expect(kerberosDescriptor.getService("SERVICE1")).andReturn(serviceDescriptor1).times(1);
+    expect(kerberosDescriptor.getService("SERVICE1")).andReturn(serviceDescriptor1).times(1);
 
     setupGetDescriptorFromCluster(kerberosDescriptor);
 
@@ -2188,6 +1944,11 @@ public class KerberosHelperTest extends EasyMockSupport {
 
     // This is a STRICT mock to help ensure that the end result is what we want.
     final RequestStageContainer requestStageContainer = createStrictMock(RequestStageContainer.class);
+    // Preparation Stage
+    expect(requestStageContainer.getLastStageId()).andReturn(-1L).anyTimes();
+    expect(requestStageContainer.getId()).andReturn(1L).once();
+    requestStageContainer.addStages(anyObject(List.class));
+    expectLastCall().once();
     // Create Principals Stage
     expect(requestStageContainer.getLastStageId()).andReturn(-1L).anyTimes();
     expect(requestStageContainer.getId()).andReturn(1L).once();
@@ -2239,11 +2000,11 @@ public class KerberosHelperTest extends EasyMockSupport {
     expect(sch1.getServiceComponentName()).andReturn("COMPONENT1").anyTimes();
     expect(sch1.getHostName()).andReturn("host1").anyTimes();
 
-    final ServiceComponentHost sch2 = createStrictMock(ServiceComponentHost.class);
+    final ServiceComponentHost sch2 = createMock(ServiceComponentHost.class);
     expect(sch2.getServiceName()).andReturn("SERVICE2").anyTimes();
     expect(sch2.getServiceComponentName()).andReturn("COMPONENT3").anyTimes();
 
-    final ServiceComponentHost sch3 = createStrictMock(ServiceComponentHost.class);
+    final ServiceComponentHost sch3 = createMock(ServiceComponentHost.class);
     expect(sch3.getServiceName()).andReturn("SERVICE3").anyTimes();
     expect(sch3.getServiceComponentName()).andReturn("COMPONENT3").anyTimes();
     expect(sch3.getHostName()).andReturn("host1").anyTimes();
@@ -2260,30 +2021,30 @@ public class KerberosHelperTest extends EasyMockSupport {
     expect(serviceKerberos.getName()).andReturn(Service.Type.KERBEROS.name()).anyTimes();
     expect(serviceKerberos.getServiceComponents())
         .andReturn(Collections.singletonMap(Role.KERBEROS_CLIENT.name(), serviceComponentKerberosClient))
-        .times(3);
+        .times(1);
 
     final Service service1 = createStrictMock(Service.class);
     expect(service1.getName()).andReturn("SERVICE1").anyTimes();
     expect(service1.getServiceComponents())
         .andReturn(Collections.<String, ServiceComponent>emptyMap())
-        .times(3);
+        .times(1);
 
     final Service service2 = createStrictMock(Service.class);
     expect(service2.getName()).andReturn("SERVICE2").anyTimes();
     expect(service2.getServiceComponents())
         .andReturn(Collections.<String, ServiceComponent>emptyMap())
-        .times(3);
+        .times(1);
 
-    final Map<String, String> kerberosEnvProperties = createNiceMock(Map.class);
+    final Map<String, String> kerberosEnvProperties = createMock(Map.class);
     expect(kerberosEnvProperties.get("kdc_type")).andReturn("mit-kdc").anyTimes();
     expect(kerberosEnvProperties.get("realm")).andReturn("FOOBAR.COM").anyTimes();
 
-    final Config kerberosEnvConfig = createNiceMock(Config.class);
+    final Config kerberosEnvConfig = createMock(Config.class);
     expect(kerberosEnvConfig.getProperties()).andReturn(kerberosEnvProperties).anyTimes();
 
-    final Map<String, String> krb5ConfProperties = createNiceMock(Map.class);
+    final Map<String, String> krb5ConfProperties = createMock(Map.class);
 
-    final Config krb5ConfConfig = createNiceMock(Config.class);
+    final Config krb5ConfConfig = createMock(Config.class);
     expect(krb5ConfConfig.getProperties()).andReturn(krb5ConfProperties).anyTimes();
 
     final Cluster cluster = createNiceMock(Cluster.class);
@@ -2321,40 +2082,14 @@ public class KerberosHelperTest extends EasyMockSupport {
       }
     }}).anyTimes();
 
-    final Clusters clusters = injector.getInstance(Clusters.class);
-    expect(clusters.getHostsForCluster("c1"))
-        .andReturn(new HashMap<String, Host>() {
-          {
-            put("host1", host);
-          }
-        })
-        .once();
-    expect(clusters.getHost("host1"))
-        .andReturn(host)
-        .once();
-
     final AmbariManagementController ambariManagementController = injector.getInstance(AmbariManagementController.class);
-    expect(ambariManagementController.findConfigurationTagsWithOverrides(cluster, "host1"))
-        .andReturn(Collections.<String, Map<String, String>>emptyMap())
-        .once();
     expect(ambariManagementController.findConfigurationTagsWithOverrides(cluster, null))
         .andReturn(Collections.<String, Map<String, String>>emptyMap())
         .once();
     expect(ambariManagementController.getRoleCommandOrder(cluster))
-        .andReturn(createNiceMock(RoleCommandOrder.class))
+        .andReturn(createMock(RoleCommandOrder.class))
         .once();
 
-    final ConfigHelper configHelper = injector.getInstance(ConfigHelper.class);
-    expect(configHelper.getEffectiveConfigProperties(anyObject(Cluster.class), anyObject(Map.class)))
-        .andReturn(new HashMap<String, Map<String, String>>() {
-          {
-            put("cluster-env", new HashMap<String, String>() {{
-              put("kerberos_domain", "FOOBAR.COM");
-            }});
-          }
-        })
-        .times(2);
-
     final KerberosPrincipalDescriptor principalDescriptor1a = createMock(KerberosPrincipalDescriptor.class);
     expect(principalDescriptor1a.getValue()).andReturn("component1a/_HOST@${realm}").anyTimes();
     expect(principalDescriptor1a.getType()).andReturn(KerberosPrincipalType.SERVICE).anyTimes();
@@ -2374,22 +2109,8 @@ public class KerberosHelperTest extends EasyMockSupport {
     expect(principalDescriptor3.getConfiguration()).andReturn("service3-site/component3.kerberos.principal").anyTimes();
 
     final KerberosKeytabDescriptor keytabDescriptor1 = createMock(KerberosKeytabDescriptor.class);
-    expect(keytabDescriptor1.getFile()).andReturn("${keytab_dir}/service1.keytab").once();
-    expect(keytabDescriptor1.getOwnerName()).andReturn("service1").once();
-    expect(keytabDescriptor1.getOwnerAccess()).andReturn("rw").once();
-    expect(keytabDescriptor1.getGroupName()).andReturn("hadoop").once();
-    expect(keytabDescriptor1.getGroupAccess()).andReturn("").once();
-    expect(keytabDescriptor1.getConfiguration()).andReturn("service1-site/component1.keytab.file").once();
-    expect(keytabDescriptor1.isCachable()).andReturn(false).once();
 
     final KerberosKeytabDescriptor keytabDescriptor3 = createMock(KerberosKeytabDescriptor.class);
-    expect(keytabDescriptor3.getFile()).andReturn("${keytab_dir}/service3.keytab").once();
-    expect(keytabDescriptor3.getOwnerName()).andReturn("service3").once();
-    expect(keytabDescriptor3.getOwnerAccess()).andReturn("rw").once();
-    expect(keytabDescriptor3.getGroupName()).andReturn("hadoop").once();
-    expect(keytabDescriptor3.getGroupAccess()).andReturn("").once();
-    expect(keytabDescriptor3.getConfiguration()).andReturn("service3-site/component3.keytab.file").once();
-    expect(keytabDescriptor3.isCachable()).andReturn(false).once();
 
     final KerberosIdentityDescriptor identityDescriptor1a = createMock(KerberosIdentityDescriptor.class);
     expect(identityDescriptor1a.getName()).andReturn("identity1a").anyTimes();
@@ -2410,44 +2131,25 @@ public class KerberosHelperTest extends EasyMockSupport {
       add(identityDescriptor1b);
     }};
     final KerberosComponentDescriptor componentDescriptor1 = createStrictMock(KerberosComponentDescriptor.class);
-    expect(componentDescriptor1.getIdentities(true)).andReturn(identityDescriptors1).times(1);
-    expect(componentDescriptor1.getConfigurations(true)).andReturn(null).times(1);
-    expect(componentDescriptor1.getIdentities(true)).andReturn(identityDescriptors1).times(1);
-    expect(componentDescriptor1.getAuthToLocalProperties()).andReturn(null).times(1);
+    expect(componentDescriptor1.getIdentities(false)).andReturn(identityDescriptors1).times(1);
 
     final ArrayList<KerberosIdentityDescriptor> identityDescriptors3 = new ArrayList<KerberosIdentityDescriptor>() {{
       add(identityDescriptor3);
     }};
     final KerberosComponentDescriptor componentDescriptor3 = createStrictMock(KerberosComponentDescriptor.class);
-    expect(componentDescriptor3.getIdentities(true)).andReturn(identityDescriptors3).times(1);
-    expect(componentDescriptor3.getConfigurations(true)).andReturn(null).times(1);
+    expect(componentDescriptor3.getIdentities(false)).andReturn(identityDescriptors3).times(1);
 
     final KerberosServiceDescriptor serviceDescriptor1 = createMock(KerberosServiceDescriptor.class);
-    expect(serviceDescriptor1.getIdentities(true)).andReturn(null).times(1);
-    expect(serviceDescriptor1.getName()).andReturn("SERVICE1").times(1);
-    expect(serviceDescriptor1.getIdentities(true)).andReturn(null).times(1);
-    expect(serviceDescriptor1.getComponents()).andReturn(new HashMap<String, KerberosComponentDescriptor>(){{
-      put("COMPONENT1", componentDescriptor1);
-    }}).times(1);
-    expect(serviceDescriptor1.getComponent("COMPONENT1")).andReturn(componentDescriptor1).once();
-    expect(serviceDescriptor1.getAuthToLocalProperties()).andReturn(null).once();
+    expect(serviceDescriptor1.getIdentities(false)).andReturn(null).times(1);
+    expect(serviceDescriptor1.getComponent("COMPONENT1")).andReturn(componentDescriptor1).times(1);
 
     final KerberosServiceDescriptor serviceDescriptor3 = createMock(KerberosServiceDescriptor.class);
-    expect(serviceDescriptor3.getIdentities(true)).andReturn(null).times(1);
-    expect(serviceDescriptor3.getName()).andReturn("SERVICE3").times(1);
-    expect(serviceDescriptor3.getComponent("COMPONENT3")).andReturn(componentDescriptor3).once();
+    expect(serviceDescriptor3.getIdentities(false)).andReturn(null).times(1);
+    expect(serviceDescriptor3.getComponent("COMPONENT3")).andReturn(componentDescriptor3).times(1);
 
     final KerberosDescriptor kerberosDescriptor = createStrictMock(KerberosDescriptor.class);
-    expect(kerberosDescriptor.getProperties()).andReturn(null).once();
-    expect(kerberosDescriptor.getService("SERVICE1")).andReturn(serviceDescriptor1).once();
-    expect(kerberosDescriptor.getService("SERVICE3")).andReturn(serviceDescriptor3).once();
-    expect(kerberosDescriptor.getIdentities()).andReturn(null).once();
-    expect(kerberosDescriptor.getAuthToLocalProperties()).andReturn(null).once();
-    expect(kerberosDescriptor.getServices()).andReturn(new HashMap<String, KerberosServiceDescriptor>()
-    {{
-      put("SERVCE1", serviceDescriptor1);
-      put("SERVCE2", serviceDescriptor3);
-    }}).once();
+    expect(kerberosDescriptor.getService("SERVICE1")).andReturn(serviceDescriptor1).times(1);
+    expect(kerberosDescriptor.getService("SERVICE3")).andReturn(serviceDescriptor3).times(1);
 
     setupGetDescriptorFromCluster(kerberosDescriptor);
 
@@ -2471,6 +2173,11 @@ public class KerberosHelperTest extends EasyMockSupport {
 
     // This is a STRICT mock to help ensure that the end result is what we want.
     final RequestStageContainer requestStageContainer = createStrictMock(RequestStageContainer.class);
+    // Preparation Stage
+    expect(requestStageContainer.getLastStageId()).andReturn(-1L).anyTimes();
+    expect(requestStageContainer.getId()).andReturn(1L).once();
+    requestStageContainer.addStages(anyObject(List.class));
+    expectLastCall().once();
     // Delete Principals Stage
     expect(requestStageContainer.getLastStageId()).andReturn(-1L).anyTimes();
     expect(requestStageContainer.getId()).andReturn(1L).once();
@@ -2507,22 +2214,22 @@ public class KerberosHelperTest extends EasyMockSupport {
     KerberosHelper kerberosHelper = injector.getInstance(KerberosHelper.class);
     boolean managingIdentities = !Boolean.FALSE.equals(manageIdentities);
 
-    final Map<String, String> kerberosEnvProperties = createNiceMock(Map.class);
+    final Map<String, String> kerberosEnvProperties = createMock(Map.class);
     expect(kerberosEnvProperties.get("kdc_type")).andReturn("mit-kdc").anyTimes();
     expect(kerberosEnvProperties.get("realm")).andReturn("FOOBAR.COM").anyTimes();
 
-    if (manageIdentities != null) {
-      expect(kerberosEnvProperties.get("manage_identities"))
-          .andReturn((manageIdentities) ? "true" : "false")
-          .anyTimes();
-    }
+    expect(kerberosEnvProperties.get("manage_identities"))
+        .andReturn((manageIdentities == null)
+            ? null
+            : ((manageIdentities) ? "true" : "false"))
+        .anyTimes();
 
-    final Config kerberosEnvConfig = createNiceMock(Config.class);
+    final Config kerberosEnvConfig = createMock(Config.class);
     expect(kerberosEnvConfig.getProperties()).andReturn(kerberosEnvProperties).anyTimes();
 
-    final Map<String, String> krb5ConfProperties = createNiceMock(Map.class);
+    final Map<String, String> krb5ConfProperties = createMock(Map.class);
 
-    final Config krb5ConfConfig = createNiceMock(Config.class);
+    final Config krb5ConfConfig = createMock(Config.class);
     expect(krb5ConfConfig.getProperties()).andReturn(krb5ConfProperties).anyTimes();
 
     final Map<String,Object> attributeMap = new HashMap<String, Object>();
@@ -2644,7 +2351,7 @@ public class KerberosHelperTest extends EasyMockSupport {
           .andReturn(Collections.<String, Map<String, String>>emptyMap())
           .once();
       expect(ambariManagementController.getRoleCommandOrder(cluster))
-          .andReturn(createNiceMock(RoleCommandOrder.class))
+          .andReturn(createMock(RoleCommandOrder.class))
           .once();
 
       final ConfigHelper configHelper = injector.getInstance(ConfigHelper.class);
@@ -2681,6 +2388,11 @@ public class KerberosHelperTest extends EasyMockSupport {
           })
           .anyTimes();
 
+      // Preparation Stage
+      expect(requestStageContainer.getLastStageId()).andReturn(-1L).anyTimes();
+      expect(requestStageContainer.getId()).andReturn(1L).once();
+      requestStageContainer.addStages(anyObject(List.class));
+      expectLastCall().once();
       // Create Principals Stage
       expect(requestStageContainer.getLastStageId()).andReturn(-1L).anyTimes();
       expect(requestStageContainer.getId()).andReturn(1L).once();
@@ -2774,16 +2486,17 @@ public class KerberosHelperTest extends EasyMockSupport {
         .andReturn(Collections.<String, ServiceComponent>emptyMap())
         .times(2);
 
-    final Map<String, String> kerberosEnvProperties = createNiceMock(Map.class);
+    final Map<String, String> kerberosEnvProperties = createMock(Map.class);
     expect(kerberosEnvProperties.get("kdc_type")).andReturn("mit-kdc").anyTimes();
     expect(kerberosEnvProperties.get("realm")).andReturn("FOOBAR.COM").anyTimes();
+    expect(kerberosEnvProperties.get("manage_identities")).andReturn(null).anyTimes();
 
-    final Config kerberosEnvConfig = createNiceMock(Config.class);
+    final Config kerberosEnvConfig = createMock(Config.class);
     expect(kerberosEnvConfig.getProperties()).andReturn(kerberosEnvProperties).anyTimes();
 
-    final Map<String, String> krb5ConfProperties = createNiceMock(Map.class);
+    final Map<String, String> krb5ConfProperties = createMock(Map.class);
 
-    final Config krb5ConfConfig = createNiceMock(Config.class);
+    final Config krb5ConfConfig = createMock(Config.class);
     expect(krb5ConfConfig.getProperties()).andReturn(krb5ConfProperties).anyTimes();
 
     final Cluster cluster = createNiceMock(Cluster.class);
@@ -2841,7 +2554,7 @@ public class KerberosHelperTest extends EasyMockSupport {
         .andReturn(Collections.<String, Map<String, String>>emptyMap())
         .once();
     expect(ambariManagementController.getRoleCommandOrder(cluster))
-        .andReturn(createNiceMock(RoleCommandOrder.class))
+        .andReturn(createMock(RoleCommandOrder.class))
         .once();
 
     final ConfigHelper configHelper = injector.getInstance(ConfigHelper.class);
@@ -2880,7 +2593,12 @@ public class KerberosHelperTest extends EasyMockSupport {
 
     // This is a STRICT mock to help ensure that the end result is what we want.
     final RequestStageContainer requestStageContainer = createStrictMock(RequestStageContainer.class);
-    // Delete  Principals Stage
+    // Preparation Stage
+    expect(requestStageContainer.getLastStageId()).andReturn(-1L).anyTimes();
+    expect(requestStageContainer.getId()).andReturn(1L).once();
+    requestStageContainer.addStages(anyObject(List.class));
+    expectLastCall().once();
+    // Delete Principals Stage
     expect(requestStageContainer.getLastStageId()).andReturn(-1L).anyTimes();
     expect(requestStageContainer.getId()).andReturn(1L).once();
     requestStageContainer.addStages(anyObject(List.class));
@@ -2956,7 +2674,7 @@ public class KerberosHelperTest extends EasyMockSupport {
     expect(host2.getHostName()).andReturn("host2").anyTimes();
     expect(host2.getState()).andReturn(HostState.HEALTHY).anyTimes();
 
-    final ServiceComponent serviceComponentKerberosClient = createMock(ServiceComponent.class);
+    final ServiceComponent serviceComponentKerberosClient = createNiceMock(ServiceComponent.class);
     expect(serviceComponentKerberosClient.getName()).andReturn(Role.KERBEROS_CLIENT.name()).anyTimes();
     expect(serviceComponentKerberosClient.getServiceComponentHosts()).andReturn(Collections.singletonMap("host1", schKerberosClient1)).anyTimes();
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/e3acc7f0/ambari-server/src/test/java/org/apache/ambari/server/state/cluster/ClusterTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/cluster/ClusterTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/cluster/ClusterTest.java
index 409b955..ebee2e3 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/cluster/ClusterTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/cluster/ClusterTest.java
@@ -740,6 +740,80 @@ public class ClusterTest {
   }
 
   @Test
+  public void testGetServiceComponentHosts_ForService() throws Exception {
+    createDefaultCluster();
+
+    Service s = serviceFactory.createNew(c1, "HDFS");
+    c1.addService(s);
+    s.persist();
+
+    ServiceComponent scNN = serviceComponentFactory.createNew(s, "NAMENODE");
+    s.addServiceComponent(scNN);
+    scNN.persist();
+    ServiceComponentHost schNNH1 = serviceComponentHostFactory.createNew(scNN, "h1");
+    scNN.addServiceComponentHost(schNNH1);
+    schNNH1.persist();
+
+    ServiceComponent scDN = serviceComponentFactory.createNew(s, "DATANODE");
+    s.addServiceComponent(scDN);
+    scDN.persist();
+    ServiceComponentHost scDNH1 = serviceComponentHostFactory.createNew(scDN, "h1");
+    scDN.addServiceComponentHost(scDNH1);
+    scDNH1.persist();
+    ServiceComponentHost scDNH2 = serviceComponentHostFactory.createNew(scDN, "h2");
+    scDN.addServiceComponentHost(scDNH2);
+    scDNH2.persist();
+
+    List<ServiceComponentHost> scHosts;
+
+    scHosts = c1.getServiceComponentHosts("HDFS", null);
+    Assert.assertEquals(3, scHosts.size());
+
+    scHosts = c1.getServiceComponentHosts("UNKNOWN SERVICE", null);
+    Assert.assertEquals(0, scHosts.size());
+  }
+
+  @Test
+  public void testGetServiceComponentHosts_ForServiceComponent() throws Exception {
+    createDefaultCluster();
+
+    Service s = serviceFactory.createNew(c1, "HDFS");
+    c1.addService(s);
+    s.persist();
+
+    ServiceComponent scNN = serviceComponentFactory.createNew(s, "NAMENODE");
+    s.addServiceComponent(scNN);
+    scNN.persist();
+    ServiceComponentHost schNNH1 = serviceComponentHostFactory.createNew(scNN, "h1");
+    scNN.addServiceComponentHost(schNNH1);
+    schNNH1.persist();
+
+    ServiceComponent scDN = serviceComponentFactory.createNew(s, "DATANODE");
+    s.addServiceComponent(scDN);
+    scDN.persist();
+    ServiceComponentHost scDNH1 = serviceComponentHostFactory.createNew(scDN, "h1");
+    scDN.addServiceComponentHost(scDNH1);
+    scDNH1.persist();
+    ServiceComponentHost scDNH2 = serviceComponentHostFactory.createNew(scDN, "h2");
+    scDN.addServiceComponentHost(scDNH2);
+    scDNH2.persist();
+
+    List<ServiceComponentHost> scHosts;
+
+    scHosts = c1.getServiceComponentHosts("HDFS", "DATANODE");
+    Assert.assertEquals(2, scHosts.size());
+
+    scHosts = c1.getServiceComponentHosts("HDFS", "UNKNOWN COMPONENT");
+    Assert.assertEquals(0, scHosts.size());
+
+    scHosts = c1.getServiceComponentHosts("UNKNOWN SERVICE", "DATANODE");
+    Assert.assertEquals(0, scHosts.size());
+
+    scHosts = c1.getServiceComponentHosts("UNKNOWN SERVICE", "UNKNOWN COMPONENT");
+    Assert.assertEquals(0, scHosts.size());
+  }
+
+  @Test
   public void testGetAndSetConfigs() throws Exception {
     createDefaultCluster();
 


[5/5] ambari git commit: AMBARI-11396. Kerberos: UI shows Kerberize Cluster step as failed with a retry button, but the backend keeps moving forward to Kerberize the cluster (rlevas)

Posted by rl...@apache.org.
AMBARI-11396. Kerberos: UI shows Kerberize Cluster step as failed with a retry button, but the backend keeps moving forward to Kerberize the cluster (rlevas)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/e3acc7f0
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/e3acc7f0
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/e3acc7f0

Branch: refs/heads/trunk
Commit: e3acc7f06ab42bdfa84d59880d7f6c76f78277fc
Parents: 7e88541
Author: Robert Levas <rl...@hortonworks.com>
Authored: Fri May 29 19:45:55 2015 -0400
Committer: Robert Levas <rl...@hortonworks.com>
Committed: Fri May 29 19:48:15 2015 -0400

----------------------------------------------------------------------
 .../ambari/server/agent/HeartBeatHandler.java   |  103 +-
 .../server/controller/ControllerModule.java     |    1 +
 .../server/controller/KerberosHelper.java       | 2844 +-----------------
 .../server/controller/KerberosHelperImpl.java   | 2811 +++++++++++++++++
 .../AbstractPrepareKerberosServerAction.java    |  192 ++
 .../kerberos/CreateKeytabFilesServerAction.java |   10 +-
 .../kerberos/KerberosServerAction.java          |    9 +
 .../PrepareDisableKerberosServerAction.java     |  250 ++
 .../PrepareEnableKerberosServerAction.java      |  183 ++
 .../PrepareKerberosIdentitiesServerAction.java  |  111 +
 .../org/apache/ambari/server/state/Cluster.java |   11 +
 .../server/state/cluster/ClusterImpl.java       |   24 +
 .../server/controller/KerberosHelperTest.java   |  650 ++--
 .../server/state/cluster/ClusterTest.java       |   74 +
 14 files changed, 4003 insertions(+), 3270 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/e3acc7f0/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartBeatHandler.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartBeatHandler.java b/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartBeatHandler.java
index 2474c3d..d5847fc 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartBeatHandler.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartBeatHandler.java
@@ -1095,75 +1095,78 @@ public class HeartBeatHandler {
    * @throws AmbariException
    */
   void injectKeytab(ExecutionCommand ec, String command, String targetHost) throws AmbariException {
-    List<Map<String, String>> kcp = ec.getKerberosCommandParams();
     String dataDir = ec.getCommandParams().get(KerberosServerAction.DATA_DIRECTORY);
-    KerberosIdentityDataFileReader reader = null;
 
-    try {
-      reader = kerberosIdentityDataFileReaderFactory.createKerberosIdentityDataFileReader(new File(dataDir, KerberosIdentityDataFileReader.DATA_FILE_NAME));
+    if(dataDir != null) {
+      KerberosIdentityDataFileReader reader = null;
+      List<Map<String, String>> kcp = ec.getKerberosCommandParams();
+
+      try {
+        reader = kerberosIdentityDataFileReaderFactory.createKerberosIdentityDataFileReader(new File(dataDir, KerberosIdentityDataFileReader.DATA_FILE_NAME));
 
-      for (Map<String, String> record : reader) {
-        String hostName = record.get(KerberosIdentityDataFileReader.HOSTNAME);
+        for (Map<String, String> record : reader) {
+          String hostName = record.get(KerberosIdentityDataFileReader.HOSTNAME);
 
-        if (targetHost.equalsIgnoreCase(hostName)) {
+          if (targetHost.equalsIgnoreCase(hostName)) {
 
-          if ("SET_KEYTAB".equalsIgnoreCase(command)) {
-            String keytabFilePath = record.get(KerberosIdentityDataFileReader.KEYTAB_FILE_PATH);
+            if ("SET_KEYTAB".equalsIgnoreCase(command)) {
+              String keytabFilePath = record.get(KerberosIdentityDataFileReader.KEYTAB_FILE_PATH);
 
-            if (keytabFilePath != null) {
+              if (keytabFilePath != null) {
 
-              String sha1Keytab = DigestUtils.sha1Hex(keytabFilePath);
-              File keytabFile = new File(dataDir + File.separator + hostName + File.separator + sha1Keytab);
+                String sha1Keytab = DigestUtils.sha1Hex(keytabFilePath);
+                File keytabFile = new File(dataDir + File.separator + hostName + File.separator + sha1Keytab);
 
-              if (keytabFile.canRead()) {
-                Map<String, String> keytabMap = new HashMap<String, String>();
-                String principal = record.get(KerberosIdentityDataFileReader.PRINCIPAL);
-                String isService = record.get(KerberosIdentityDataFileReader.SERVICE);
+                if (keytabFile.canRead()) {
+                  Map<String, String> keytabMap = new HashMap<String, String>();
+                  String principal = record.get(KerberosIdentityDataFileReader.PRINCIPAL);
+                  String isService = record.get(KerberosIdentityDataFileReader.SERVICE);
 
-                keytabMap.put(KerberosIdentityDataFileReader.HOSTNAME, hostName);
-                keytabMap.put(KerberosIdentityDataFileReader.SERVICE, isService);
-                keytabMap.put(KerberosIdentityDataFileReader.COMPONENT, record.get(KerberosIdentityDataFileReader.COMPONENT));
-                keytabMap.put(KerberosIdentityDataFileReader.PRINCIPAL, principal);
-                keytabMap.put(KerberosIdentityDataFileReader.KEYTAB_FILE_PATH, keytabFilePath);
-                keytabMap.put(KerberosIdentityDataFileReader.KEYTAB_FILE_OWNER_NAME, record.get(KerberosIdentityDataFileReader.KEYTAB_FILE_OWNER_NAME));
-                keytabMap.put(KerberosIdentityDataFileReader.KEYTAB_FILE_OWNER_ACCESS, record.get(KerberosIdentityDataFileReader.KEYTAB_FILE_OWNER_ACCESS));
-                keytabMap.put(KerberosIdentityDataFileReader.KEYTAB_FILE_GROUP_NAME, record.get(KerberosIdentityDataFileReader.KEYTAB_FILE_GROUP_NAME));
-                keytabMap.put(KerberosIdentityDataFileReader.KEYTAB_FILE_GROUP_ACCESS, record.get(KerberosIdentityDataFileReader.KEYTAB_FILE_GROUP_ACCESS));
+                  keytabMap.put(KerberosIdentityDataFileReader.HOSTNAME, hostName);
+                  keytabMap.put(KerberosIdentityDataFileReader.SERVICE, isService);
+                  keytabMap.put(KerberosIdentityDataFileReader.COMPONENT, record.get(KerberosIdentityDataFileReader.COMPONENT));
+                  keytabMap.put(KerberosIdentityDataFileReader.PRINCIPAL, principal);
+                  keytabMap.put(KerberosIdentityDataFileReader.KEYTAB_FILE_PATH, keytabFilePath);
+                  keytabMap.put(KerberosIdentityDataFileReader.KEYTAB_FILE_OWNER_NAME, record.get(KerberosIdentityDataFileReader.KEYTAB_FILE_OWNER_NAME));
+                  keytabMap.put(KerberosIdentityDataFileReader.KEYTAB_FILE_OWNER_ACCESS, record.get(KerberosIdentityDataFileReader.KEYTAB_FILE_OWNER_ACCESS));
+                  keytabMap.put(KerberosIdentityDataFileReader.KEYTAB_FILE_GROUP_NAME, record.get(KerberosIdentityDataFileReader.KEYTAB_FILE_GROUP_NAME));
+                  keytabMap.put(KerberosIdentityDataFileReader.KEYTAB_FILE_GROUP_ACCESS, record.get(KerberosIdentityDataFileReader.KEYTAB_FILE_GROUP_ACCESS));
 
-                BufferedInputStream bufferedIn = new BufferedInputStream(new FileInputStream(keytabFile));
-                byte[] keytabContent = IOUtils.toByteArray(bufferedIn);
-                String keytabContentBase64 = Base64.encodeBase64String(keytabContent);
-                keytabMap.put(KerberosServerAction.KEYTAB_CONTENT_BASE64, keytabContentBase64);
+                  BufferedInputStream bufferedIn = new BufferedInputStream(new FileInputStream(keytabFile));
+                  byte[] keytabContent = IOUtils.toByteArray(bufferedIn);
+                  String keytabContentBase64 = Base64.encodeBase64String(keytabContent);
+                  keytabMap.put(KerberosServerAction.KEYTAB_CONTENT_BASE64, keytabContentBase64);
 
-                kcp.add(keytabMap);
+                  kcp.add(keytabMap);
+                }
               }
-            }
-          } else if ("REMOVE_KEYTAB".equalsIgnoreCase(command)) {
-            Map<String, String> keytabMap = new HashMap<String, String>();
+            } else if ("REMOVE_KEYTAB".equalsIgnoreCase(command)) {
+              Map<String, String> keytabMap = new HashMap<String, String>();
 
-            keytabMap.put(KerberosIdentityDataFileReader.HOSTNAME, hostName);
-            keytabMap.put(KerberosIdentityDataFileReader.SERVICE, record.get(KerberosIdentityDataFileReader.SERVICE));
-            keytabMap.put(KerberosIdentityDataFileReader.COMPONENT, record.get(KerberosIdentityDataFileReader.COMPONENT));
-            keytabMap.put(KerberosIdentityDataFileReader.PRINCIPAL, record.get(KerberosIdentityDataFileReader.PRINCIPAL));
-            keytabMap.put(KerberosIdentityDataFileReader.KEYTAB_FILE_PATH, record.get(KerberosIdentityDataFileReader.KEYTAB_FILE_PATH));
+              keytabMap.put(KerberosIdentityDataFileReader.HOSTNAME, hostName);
+              keytabMap.put(KerberosIdentityDataFileReader.SERVICE, record.get(KerberosIdentityDataFileReader.SERVICE));
+              keytabMap.put(KerberosIdentityDataFileReader.COMPONENT, record.get(KerberosIdentityDataFileReader.COMPONENT));
+              keytabMap.put(KerberosIdentityDataFileReader.PRINCIPAL, record.get(KerberosIdentityDataFileReader.PRINCIPAL));
+              keytabMap.put(KerberosIdentityDataFileReader.KEYTAB_FILE_PATH, record.get(KerberosIdentityDataFileReader.KEYTAB_FILE_PATH));
 
-            kcp.add(keytabMap);
+              kcp.add(keytabMap);
+            }
           }
         }
-      }
-    } catch (IOException e) {
-      throw new AmbariException("Could not inject keytabs to enable kerberos");
-    }  finally {
-      if (reader != null) {
-        try {
-          reader.close();
-        } catch (Throwable t) {
-          // ignored
+      } catch (IOException e) {
+        throw new AmbariException("Could not inject keytabs to enable kerberos");
+      } finally {
+        if (reader != null) {
+          try {
+            reader.close();
+          } catch (Throwable t) {
+            // ignored
+          }
         }
       }
-    }
 
-    ec.setKerberosCommandParams(kcp);
+      ec.setKerberosCommandParams(kcp);
+    }
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/ambari/blob/e3acc7f0/ambari-server/src/main/java/org/apache/ambari/server/controller/ControllerModule.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/ControllerModule.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/ControllerModule.java
index 72c33bd..ac651ce 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/ControllerModule.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/ControllerModule.java
@@ -284,6 +284,7 @@ public class ControllerModule extends AbstractModule {
     bind(KerberosOperationHandlerFactory.class);
     bind(KerberosDescriptorFactory.class);
     bind(KerberosServiceDescriptorFactory.class);
+    bind(KerberosHelper.class).to(KerberosHelperImpl.class);
 
     bind(Configuration.class).toInstance(configuration);
     bind(OsFamily.class).toInstance(os_family);


[3/5] ambari git commit: AMBARI-11396. Kerberos: UI shows Kerberize Cluster step as failed with a retry button, but the backend keeps moving forward to Kerberize the cluster (rlevas)

Posted by rl...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/e3acc7f0/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelperImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelperImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelperImpl.java
new file mode 100644
index 0000000..dc5fc75
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelperImpl.java
@@ -0,0 +1,2811 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.controller;
+
+import com.google.inject.Inject;
+import com.google.inject.Singleton;
+import com.google.inject.persist.Transactional;
+import org.apache.ambari.server.AmbariException;
+import org.apache.ambari.server.Role;
+import org.apache.ambari.server.RoleCommand;
+import org.apache.ambari.server.actionmanager.ActionManager;
+import org.apache.ambari.server.actionmanager.RequestFactory;
+import org.apache.ambari.server.actionmanager.Stage;
+import org.apache.ambari.server.actionmanager.StageFactory;
+import org.apache.ambari.server.api.services.AmbariMetaInfo;
+import org.apache.ambari.server.configuration.Configuration;
+import org.apache.ambari.server.controller.internal.ArtifactResourceProvider;
+import org.apache.ambari.server.controller.internal.RequestImpl;
+import org.apache.ambari.server.controller.internal.RequestResourceFilter;
+import org.apache.ambari.server.controller.internal.RequestStageContainer;
+import org.apache.ambari.server.controller.spi.ClusterController;
+import org.apache.ambari.server.controller.spi.NoSuchParentResourceException;
+import org.apache.ambari.server.controller.spi.NoSuchResourceException;
+import org.apache.ambari.server.controller.spi.Predicate;
+import org.apache.ambari.server.controller.spi.Request;
+import org.apache.ambari.server.controller.spi.Resource;
+import org.apache.ambari.server.controller.spi.ResourceProvider;
+import org.apache.ambari.server.controller.spi.SystemException;
+import org.apache.ambari.server.controller.spi.UnsupportedPropertyException;
+import org.apache.ambari.server.controller.utilities.ClusterControllerHelper;
+import org.apache.ambari.server.controller.utilities.PredicateBuilder;
+import org.apache.ambari.server.metadata.RoleCommandOrder;
+import org.apache.ambari.server.serveraction.ServerAction;
+import org.apache.ambari.server.serveraction.kerberos.CreateKeytabFilesServerAction;
+import org.apache.ambari.server.serveraction.kerberos.CreatePrincipalsServerAction;
+import org.apache.ambari.server.serveraction.kerberos.DestroyPrincipalsServerAction;
+import org.apache.ambari.server.serveraction.kerberos.FinalizeKerberosServerAction;
+import org.apache.ambari.server.serveraction.kerberos.KDCType;
+import org.apache.ambari.server.serveraction.kerberos.KerberosIdentityDataFileWriter;
+import org.apache.ambari.server.serveraction.kerberos.KerberosAdminAuthenticationException;
+import org.apache.ambari.server.serveraction.kerberos.KerberosCredential;
+import org.apache.ambari.server.serveraction.kerberos.KerberosIdentityDataFileWriterFactory;
+import org.apache.ambari.server.serveraction.kerberos.KerberosInvalidConfigurationException;
+import org.apache.ambari.server.serveraction.kerberos.KerberosKDCConnectionException;
+import org.apache.ambari.server.serveraction.kerberos.KerberosLDAPContainerException;
+import org.apache.ambari.server.serveraction.kerberos.KerberosMissingAdminCredentialsException;
+import org.apache.ambari.server.serveraction.kerberos.KerberosOperationException;
+import org.apache.ambari.server.serveraction.kerberos.KerberosOperationHandler;
+import org.apache.ambari.server.serveraction.kerberos.KerberosOperationHandlerFactory;
+import org.apache.ambari.server.serveraction.kerberos.KerberosRealmException;
+import org.apache.ambari.server.serveraction.kerberos.KerberosServerAction;
+import org.apache.ambari.server.serveraction.kerberos.PrepareDisableKerberosServerAction;
+import org.apache.ambari.server.serveraction.kerberos.PrepareEnableKerberosServerAction;
+import org.apache.ambari.server.serveraction.kerberos.PrepareKerberosIdentitiesServerAction;
+import org.apache.ambari.server.serveraction.kerberos.UpdateKerberosConfigsServerAction;
+import org.apache.ambari.server.serveraction.kerberos.CleanupServerAction;
+import org.apache.ambari.server.stageplanner.RoleGraph;
+import org.apache.ambari.server.stageplanner.RoleGraphFactory;
+import org.apache.ambari.server.state.Cluster;
+import org.apache.ambari.server.state.Clusters;
+import org.apache.ambari.server.state.Config;
+import org.apache.ambari.server.state.ConfigHelper;
+import org.apache.ambari.server.state.Host;
+import org.apache.ambari.server.state.HostState;
+import org.apache.ambari.server.state.SecurityState;
+import org.apache.ambari.server.state.SecurityType;
+import org.apache.ambari.server.state.Service;
+import org.apache.ambari.server.state.ServiceComponent;
+import org.apache.ambari.server.state.ServiceComponentHost;
+import org.apache.ambari.server.state.StackId;
+import org.apache.ambari.server.state.State;
+import org.apache.ambari.server.state.kerberos.KerberosComponentDescriptor;
+import org.apache.ambari.server.state.kerberos.KerberosConfigurationDescriptor;
+import org.apache.ambari.server.state.kerberos.KerberosDescriptor;
+import org.apache.ambari.server.state.kerberos.KerberosDescriptorFactory;
+import org.apache.ambari.server.state.kerberos.KerberosIdentityDescriptor;
+import org.apache.ambari.server.state.kerberos.KerberosKeytabDescriptor;
+import org.apache.ambari.server.state.kerberos.KerberosPrincipalDescriptor;
+import org.apache.ambari.server.state.kerberos.KerberosServiceDescriptor;
+import org.apache.ambari.server.state.kerberos.VariableReplacementHelper;
+import org.apache.ambari.server.state.svccomphost.ServiceComponentHostServerActionEvent;
+import org.apache.ambari.server.utils.StageUtils;
+import org.apache.commons.io.FileUtils;
+import org.apache.commons.lang.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Random;
+import java.util.Set;
+
+@Singleton
+public class KerberosHelperImpl implements KerberosHelper {
+
+  private static final String BASE_LOG_DIR = "/tmp/ambari";
+
+  private static final Logger LOG = LoggerFactory.getLogger(KerberosHelperImpl.class);
+
+  /**
+   * name of the property used to hold the service check identifier value, used when creating and
+   * destroying the (unique) service check identity.
+   */
+  private static final String SERVICE_CHECK_IDENTIFIER = "_kerberos_internal_service_check_identifier";
+
+  @Inject
+  private AmbariCustomCommandExecutionHelper customCommandExecutionHelper;
+
+  @Inject
+  private AmbariManagementController ambariManagementController;
+
+  @Inject
+  private AmbariMetaInfo ambariMetaInfo;
+
+  @Inject
+  private ActionManager actionManager;
+
+  @Inject
+  private RequestFactory requestFactory;
+
+  @Inject
+  private StageFactory stageFactory;
+
+  @Inject
+  private RoleGraphFactory roleGraphFactory;
+
+  @Inject
+  private Clusters clusters;
+
+  @Inject
+  private ConfigHelper configHelper;
+
+  @Inject
+  private VariableReplacementHelper variableReplacementHelper;
+
+  @Inject
+  private Configuration configuration;
+
+  @Inject
+  private KerberosOperationHandlerFactory kerberosOperationHandlerFactory;
+
+  @Inject
+  private KerberosDescriptorFactory kerberosDescriptorFactory;
+
+  @Inject
+  private KerberosIdentityDataFileWriterFactory kerberosIdentityDataFileWriterFactory;
+
+  /**
+   * Used to get kerberos descriptors associated with the cluster or stack.
+   * Currently not available via injection.
+   */
+  private static ClusterController clusterController = null;
+
+  @Override
+  public RequestStageContainer toggleKerberos(Cluster cluster, SecurityType securityType,
+                                              RequestStageContainer requestStageContainer,
+                                              Boolean manageIdentities)
+      throws AmbariException, KerberosOperationException {
+
+    KerberosDetails kerberosDetails = getKerberosDetails(cluster, manageIdentities);
+
+    // Update KerberosDetails with the new security type - the current one in the cluster is the "old" value
+    kerberosDetails.setSecurityType(securityType);
+
+    if (securityType == SecurityType.KERBEROS) {
+      LOG.info("Configuring Kerberos for realm {} on cluster, {}", kerberosDetails.getDefaultRealm(), cluster.getClusterName());
+      requestStageContainer = handle(cluster, kerberosDetails, null, null, null, requestStageContainer, new EnableKerberosHandler());
+    } else if (securityType == SecurityType.NONE) {
+      LOG.info("Disabling Kerberos from cluster, {}", cluster.getClusterName());
+      requestStageContainer = handle(cluster, kerberosDetails, null, null, null, requestStageContainer, new DisableKerberosHandler());
+    } else {
+      throw new AmbariException(String.format("Unexpected security type value: %s", securityType.name()));
+    }
+
+    return requestStageContainer;
+  }
+
+  @Override
+  public RequestStageContainer executeCustomOperations(Cluster cluster, Map<String, String> requestProperties,
+                                                       RequestStageContainer requestStageContainer,
+                                                       Boolean manageIdentities)
+      throws AmbariException, KerberosOperationException {
+
+    if (requestProperties != null) {
+
+      for (SupportedCustomOperation operation : SupportedCustomOperation.values()) {
+        if (requestProperties.containsKey(operation.name().toLowerCase())) {
+          String value = requestProperties.get(operation.name().toLowerCase());
+
+          // The operation specific logic is kept in one place and described here
+          switch (operation) {
+            case REGENERATE_KEYTABS:
+              if (cluster.getSecurityType() != SecurityType.KERBEROS) {
+                throw new AmbariException(String.format("Custom operation %s can only be requested with the security type cluster property: %s", operation.name(), SecurityType.KERBEROS.name()));
+              }
+
+              if ("true".equalsIgnoreCase(value) || "all".equalsIgnoreCase(value)) {
+                requestStageContainer = handle(cluster, getKerberosDetails(cluster, manageIdentities), null, null, null,
+                    requestStageContainer, new CreatePrincipalsAndKeytabsHandler(true));
+              } else if ("missing".equalsIgnoreCase(value)) {
+                requestStageContainer = handle(cluster, getKerberosDetails(cluster, manageIdentities), null, null, null,
+                    requestStageContainer, new CreatePrincipalsAndKeytabsHandler(false));
+              } else {
+                throw new AmbariException(String.format("Unexpected directive value: %s", value));
+              }
+
+              break;
+
+            default: // No other operations are currently supported
+              throw new AmbariException(String.format("Custom operation not supported: %s", operation.name()));
+          }
+        }
+      }
+    }
+
+    return requestStageContainer;
+  }
+
+
+  /**
+   * Ensures Kerberos identities (principals and keytabs) exist for the filtered set of
+   * services/components by delegating to {@code handle} with a
+   * {@code CreatePrincipalsAndKeytabsHandler(false)} — i.e. existing keytabs are not
+   * force-regenerated, only missing ones are created.
+   */
+  @Override
+  public RequestStageContainer ensureIdentities(Cluster cluster, Map<String, ? extends Collection<String>> serviceComponentFilter,
+                                                Collection<String> identityFilter, Set<String> hostsToForceKerberosOperations,
+                                                RequestStageContainer requestStageContainer, Boolean manageIdentities)
+      throws AmbariException, KerberosOperationException {
+    return handle(cluster, getKerberosDetails(cluster, manageIdentities), serviceComponentFilter, identityFilter,
+        hostsToForceKerberosOperations, requestStageContainer, new CreatePrincipalsAndKeytabsHandler(false));
+  }
+
+  /**
+   * Deletes Kerberos identities (principals and keytabs) for the filtered set of
+   * services/components by delegating to {@code handle} with a
+   * {@code DeletePrincipalsAndKeytabsHandler}. No hosts are forced into the operation
+   * (the hosts-to-force argument is passed as {@code null}).
+   */
+  @Override
+  public RequestStageContainer deleteIdentities(Cluster cluster, Map<String, ? extends Collection<String>> serviceComponentFilter,
+                                                Collection<String> identityFilter, RequestStageContainer requestStageContainer,
+                                                Boolean manageIdentities)
+      throws AmbariException, KerberosOperationException {
+    return handle(cluster, getKerberosDetails(cluster, manageIdentities), serviceComponentFilter, identityFilter, null,
+        requestStageContainer, new DeletePrincipalsAndKeytabsHandler());
+  }
+
+  /**
+   * Applies Kerberos-related configuration updates for the service that owns the given
+   * ServiceComponentHost. For each component declared in the service's Kerberos descriptor,
+   * identity- and component-level configuration values are merged into a working map which is
+   * then pushed to the cluster via {@code configHelper.updateConfigType}, one call per
+   * affected config type. Does nothing if the service has no Kerberos descriptor entry.
+   */
+  @Override
+  public void configureService(Cluster cluster, ServiceComponentHost serviceComponentHost)
+      throws AmbariException, KerberosInvalidConfigurationException {
+
+    KerberosDetails kerberosDetails = getKerberosDetails(cluster, null);
+
+    // Set properties...
+    String serviceName = serviceComponentHost.getServiceName();
+    KerberosDescriptor kerberosDescriptor = getKerberosDescriptor(cluster);
+    KerberosServiceDescriptor serviceDescriptor = kerberosDescriptor.getService(serviceName);
+
+    if (serviceDescriptor != null) {
+      Map<String, String> kerberosDescriptorProperties = kerberosDescriptor.getProperties();
+      Map<String, Map<String, String>> kerberosConfigurations = new HashMap<String, Map<String, String>>();
+      // Host-specific effective configuration used to resolve ${...} variable references
+      Map<String, Map<String, String>> configurations = calculateConfigurations(cluster,
+          serviceComponentHost.getHostName(), kerberosDescriptorProperties);
+
+      Map<String, KerberosComponentDescriptor> componentDescriptors = serviceDescriptor.getComponents();
+      for (KerberosComponentDescriptor componentDescriptor : componentDescriptors.values()) {
+        if (componentDescriptor != null) {
+          Map<String, Map<String, String>> identityConfigurations;
+          List<KerberosIdentityDescriptor> identities;
+
+          // NOTE(review): the service-level identities are re-merged on every iteration of the
+          // component loop; this appears redundant (same values each time) — confirm intended.
+          identities = serviceDescriptor.getIdentities(true);
+          identityConfigurations = getConfigurations(identities);
+          if (identityConfigurations != null) {
+            for (Map.Entry<String, Map<String, String>> entry : identityConfigurations.entrySet()) {
+              mergeConfigurations(kerberosConfigurations, entry.getKey(), entry.getValue(), configurations);
+            }
+          }
+
+          // Component-level identity configurations
+          identities = componentDescriptor.getIdentities(true);
+          identityConfigurations = getConfigurations(identities);
+          if (identityConfigurations != null) {
+            for (Map.Entry<String, Map<String, String>> entry : identityConfigurations.entrySet()) {
+              mergeConfigurations(kerberosConfigurations, entry.getKey(), entry.getValue(), configurations);
+            }
+          }
+
+          // Component-level configuration block (non-identity settings)
+          mergeConfigurations(kerberosConfigurations,
+              componentDescriptor.getConfigurations(true), configurations);
+        }
+      }
+
+      // Compute and inject auth_to_local rules for the affected config types
+      setAuthToLocalRules(kerberosDescriptor, cluster, kerberosDetails.getDefaultRealm(), configurations, kerberosConfigurations);
+
+      // Persist one config-type update per affected type, attributed to the current API user
+      for (Map.Entry<String, Map<String, String>> entry : kerberosConfigurations.entrySet()) {
+        configHelper.updateConfigType(cluster, ambariManagementController, entry.getKey(), entry.getValue(), null,
+            ambariManagementController.getAuthName(), String.format("Enabling Kerberos for %s", serviceName));
+      }
+    }
+  }
+
+  /**
+   * Creates the identity used by the Kerberos service check by delegating to
+   * {@code handleTestIdentity} with a {@code CreatePrincipalsAndKeytabsHandler(false)}
+   * (missing identities are created; existing keytabs are not regenerated).
+   */
+  @Override
+  public RequestStageContainer createTestIdentity(Cluster cluster, Map<String, String> commandParamsStage,
+                                                  RequestStageContainer requestStageContainer)
+      throws KerberosOperationException, AmbariException {
+    return handleTestIdentity(cluster, getKerberosDetails(cluster, null), commandParamsStage, requestStageContainer, new CreatePrincipalsAndKeytabsHandler(false));
+  }
+
+  /**
+   * Deletes the identity used by the Kerberos service check via
+   * {@code handleTestIdentity} with a {@code DeletePrincipalsAndKeytabsHandler}, then clears
+   * the stored service-check identifier so a later check starts from a clean state.
+   */
+  @Override
+  public RequestStageContainer deleteTestIdentity(Cluster cluster, Map<String, String> commandParamsStage,
+                                                  RequestStageContainer requestStageContainer)
+      throws KerberosOperationException, AmbariException {
+    requestStageContainer = handleTestIdentity(cluster, getKerberosDetails(cluster, null), commandParamsStage, requestStageContainer, new DeletePrincipalsAndKeytabsHandler());
+
+    // Clear the Kerberos service check identifier
+    setKerberosServiceCheckIdentifier(cluster, null);
+
+    return requestStageContainer;
+  }
+
+  /**
+   * Validates the KDC administrator credentials for the cluster. Delegates to the private
+   * overload with a {@code null} KerberosDetails, causing the details to be derived from the
+   * cluster's current configuration.
+   */
+  @Override
+  public void validateKDCCredentials(Cluster cluster) throws KerberosMissingAdminCredentialsException,
+      KerberosAdminAuthenticationException,
+      KerberosInvalidConfigurationException,
+      AmbariException {
+    validateKDCCredentials(null, cluster);
+  }
+
+  /**
+   * Computes Kerberos auth-to-local rules from every identity declared in the descriptor
+   * (cluster-level plus service/component levels for services actually installed in the
+   * cluster) and writes the generated rule set into each "auth-to-local" property declared
+   * by the descriptor. Existing rules found in either the current configuration or the
+   * pending kerberosConfigurations are merged in before generating.
+   *
+   * @param kerberosDescriptor     descriptor supplying identities and auth-to-local property names; no-op if null
+   * @param cluster                cluster whose installed services scope the identity collection
+   * @param realm                  default realm passed to the rule generator
+   * @param existingConfigurations current effective configurations (read for existing rules)
+   * @param kerberosConfigurations accumulating map of config updates (written to)
+   */
+  @Override
+  public void setAuthToLocalRules(KerberosDescriptor kerberosDescriptor, Cluster cluster, String realm,
+                                  Map<String, Map<String, String>> existingConfigurations,
+                                  Map<String, Map<String, String>> kerberosConfigurations)
+      throws AmbariException {
+
+    if (kerberosDescriptor != null) {
+
+      Set<String> authToLocalProperties;
+      Set<String> authToLocalPropertiesToSet = new HashSet<String>();
+
+      // Determine which properties need to be set
+      AuthToLocalBuilder authToLocalBuilder = new AuthToLocalBuilder();
+
+      // Cluster-level identities
+      addIdentities(authToLocalBuilder, kerberosDescriptor.getIdentities(), null, existingConfigurations);
+
+      authToLocalProperties = kerberosDescriptor.getAuthToLocalProperties();
+      if (authToLocalProperties != null) {
+        authToLocalPropertiesToSet.addAll(authToLocalProperties);
+      }
+
+      Map<String, KerberosServiceDescriptor> services = kerberosDescriptor.getServices();
+      if (services != null) {
+        Map<String, Service> installedServices = cluster.getServices();
+
+        for (KerberosServiceDescriptor service : services.values()) {
+          // Only consider services that are actually installed in this cluster
+          if (installedServices.containsKey(service.getName())) {
+
+            addIdentities(authToLocalBuilder, service.getIdentities(true), null, existingConfigurations);
+
+            authToLocalProperties = service.getAuthToLocalProperties();
+            if (authToLocalProperties != null) {
+              authToLocalPropertiesToSet.addAll(authToLocalProperties);
+            }
+
+            Map<String, KerberosComponentDescriptor> components = service.getComponents();
+            if (components != null) {
+              for (KerberosComponentDescriptor component : components.values()) {
+                addIdentities(authToLocalBuilder, component.getIdentities(true), null, existingConfigurations);
+
+                authToLocalProperties = component.getAuthToLocalProperties();
+                if (authToLocalProperties != null) {
+                  authToLocalPropertiesToSet.addAll(authToLocalProperties);
+                }
+              }
+            }
+          }
+        }
+      }
+
+      if (!authToLocalPropertiesToSet.isEmpty()) {
+        for (String authToLocalProperty : authToLocalPropertiesToSet) {
+          // Property specs are of the form "config-type/property-name"
+          String[] parts = authToLocalProperty.split("/");
+
+          // NOTE(review): specs that do not split into exactly two parts are silently
+          // skipped — confirm malformed entries should not be logged or rejected.
+          if (parts.length == 2) {
+            // Copy the shared builder so rules merged for one property do not leak into another
+            AuthToLocalBuilder builder = authToLocalBuilder.copy();
+            String configType = parts[0];
+            String propertyName = parts[1];
+
+            // Add existing auth_to_local configuration, if set
+            Map<String, String> existingConfiguration = existingConfigurations.get(configType);
+            if (existingConfiguration != null) {
+              builder.addRules(existingConfiguration.get(propertyName));
+            }
+
+            // Add/update descriptor auth_to_local configuration, if set
+            Map<String, String> kerberosConfiguration = kerberosConfigurations.get(configType);
+            if (kerberosConfiguration != null) {
+              builder.addRules(kerberosConfiguration.get(propertyName));
+            } else {
+              kerberosConfiguration = new HashMap<String, String>();
+              kerberosConfigurations.put(configType, kerberosConfiguration);
+            }
+
+            kerberosConfiguration.put(propertyName, builder.generate(realm));
+          }
+        }
+      }
+    }
+  }
+
+
+  /**
+   * Walks every host in the cluster and returns the ServiceComponentHosts that should take
+   * part in a Kerberos operation: the SCH must pass the (optional) service/component filter,
+   * have a service-level Kerberos descriptor entry, be accepted by the supplied
+   * shouldProcessCommand callback, and have at least one identity remaining after applying
+   * the identity filter. SCHs with zero matching identities are excluded.
+   *
+   * @param shouldProcessCommand per-SCH predicate (e.g. state check) consulted before counting identities
+   * @return list of matching SCHs; empty when the cluster has no services or hosts
+   */
+  @Override
+  public List<ServiceComponentHost> getServiceComponentHostsToProcess(Cluster cluster,
+                                                                      KerberosDescriptor kerberosDescriptor,
+                                                                      Map<String, ? extends Collection<String>> serviceComponentFilter,
+                                                                      Collection<String> identityFilter,
+                                                                      Command<Boolean, ServiceComponentHost> shouldProcessCommand)
+      throws AmbariException {
+    List<ServiceComponentHost> serviceComponentHostsToProcess = new ArrayList<ServiceComponentHost>();
+    Map<String, Service> services = cluster.getServices();
+
+    if ((services != null) && !services.isEmpty()) {
+      Collection<Host> hosts = cluster.getHosts();
+
+      if ((hosts != null) && !hosts.isEmpty()) {
+        // Iterate over the hosts in the cluster to find the components installed in each.  For each
+        // component (aka service component host - sch) determine the configuration updates and
+        // the principals and keytabs to create.
+        for (Host host : hosts) {
+          String hostname = host.getHostName();
+
+          // Get a list of components on the current host
+          List<ServiceComponentHost> serviceComponentHosts = cluster.getServiceComponentHosts(hostname);
+
+          if ((serviceComponentHosts != null) && !serviceComponentHosts.isEmpty()) {
+
+            // Iterate over the components installed on the current host to get the service and
+            // component-level Kerberos descriptors in order to determine which principals,
+            // keytab files, and configurations need to be created or updated.
+            for (ServiceComponentHost sch : serviceComponentHosts) {
+              String serviceName = sch.getServiceName();
+              String componentName = sch.getServiceComponentName();
+
+              // If there is no filter or the filter contains the current service name...
+              if ((serviceComponentFilter == null) || serviceComponentFilter.containsKey(serviceName)) {
+                Collection<String> componentFilter = (serviceComponentFilter == null) ? null : serviceComponentFilter.get(serviceName);
+                KerberosServiceDescriptor serviceDescriptor = kerberosDescriptor.getService(serviceName);
+
+                if (serviceDescriptor != null) {
+                  // If there is no filter or the filter contains the current component name,
+                  // test to see if this component should be processed by querying the handler...
+                  if (((componentFilter == null) || componentFilter.contains(componentName)) && shouldProcessCommand.invoke(sch)) {
+                    KerberosComponentDescriptor componentDescriptor = serviceDescriptor.getComponent(componentName);
+                    int identities = 0;
+
+                    // Add service-level principals (and keytabs)
+                    identities += filteredIdentitiesCount(serviceDescriptor.getIdentities(false), identityFilter);
+
+                    if (componentDescriptor != null) {
+                      // Add component-level principals (and keytabs)
+                      identities += filteredIdentitiesCount(componentDescriptor.getIdentities(false), identityFilter);
+                    }
+
+                    // Only process this SCH if at least one identity survived the filter
+                    if (identities > 0) {
+                      serviceComponentHostsToProcess.add(sch);
+                    }
+                  }
+                }
+              }
+            }
+          }
+        }
+      }
+    }
+
+    return serviceComponentHostsToProcess;
+  }
+
+
+  /**
+   * Returns the names of hosts whose KERBEROS_CLIENT component is in the INSTALLED state —
+   * i.e. hosts that are ready to take part in Kerberos agent-side operations.
+   *
+   * @return possibly-empty set of hostnames; never null
+   */
+  @Override
+  public Set<String> getHostsWithValidKerberosClient(Cluster cluster)
+      throws AmbariException {
+    Set<String> hostsWithValidKerberosClient = new HashSet<String>();
+    List<ServiceComponentHost> schKerberosClients = cluster.getServiceComponentHosts(Service.Type.KERBEROS.name(), Role.KERBEROS_CLIENT.name());
+
+    if (schKerberosClients != null) {
+      for (ServiceComponentHost sch : schKerberosClients) {
+        // Only an INSTALLED client counts as "valid"
+        if (sch.getState() == State.INSTALLED) {
+          hostsWithValidKerberosClient.add(sch.getHostName());
+        }
+      }
+    }
+
+    return hostsWithValidKerberosClient;
+  }
+
+  /**
+   * Builds the effective Kerberos descriptor for a cluster: the stack's default descriptor
+   * with the cluster's user-supplied descriptor artifact (if any) overlaid on top via
+   * {@code update}. Returns the user-supplied descriptor alone if the stack has no default,
+   * and may return null if neither exists.
+   */
+  @Override
+  public KerberosDescriptor getKerberosDescriptor(Cluster cluster) throws AmbariException {
+    StackId stackId = cluster.getCurrentStackVersion();
+
+    // -------------------------------
+    // Get the default Kerberos descriptor from the stack, which is the same as the value from
+    // stacks/:stackName/versions/:version/artifacts/kerberos_descriptor
+    KerberosDescriptor defaultDescriptor = ambariMetaInfo.getKerberosDescriptor(stackId.getStackName(), stackId.getStackVersion());
+    // -------------------------------
+
+    // Get the user-supplied Kerberos descriptor from cluster/:clusterName/artifacts/kerberos_descriptor
+    KerberosDescriptor descriptor = null;
+
+    PredicateBuilder pb = new PredicateBuilder();
+    Predicate predicate = pb.begin().property("Artifacts/cluster_name").equals(cluster.getClusterName()).and().
+        property(ArtifactResourceProvider.ARTIFACT_NAME_PROPERTY).equals("kerberos_descriptor").
+        end().toPredicate();
+
+    // NOTE(review): lazy initialization of clusterController guarded by a class-level lock,
+    // but the field is read again below without the lock — confirm the field is volatile or
+    // otherwise safely published to avoid a visibility race.
+    synchronized (KerberosHelperImpl.class) {
+      if (clusterController == null) {
+        clusterController = ClusterControllerHelper.getClusterController();
+      }
+    }
+
+    ResourceProvider artifactProvider =
+        clusterController.ensureResourceProvider(Resource.Type.Artifact);
+
+    Request request = new RequestImpl(Collections.<String>emptySet(),
+        Collections.<Map<String, Object>>emptySet(), Collections.<String, String>emptyMap(), null);
+
+    Set<Resource> response = null;
+    try {
+      response = artifactProvider.getResources(request, predicate);
+    } catch (SystemException e) {
+      // NOTE(review): printStackTrace writes to stderr in addition to the wrapped rethrow;
+      // consider using the logger instead.
+      e.printStackTrace();
+      throw new AmbariException("An unknown error occurred while trying to obtain the cluster kerberos descriptor", e);
+    } catch (UnsupportedPropertyException e) {
+      e.printStackTrace();
+      throw new AmbariException("An unknown error occurred while trying to obtain the cluster kerberos descriptor", e);
+    } catch (NoSuchParentResourceException e) {
+      // parent cluster doesn't exist.  shouldn't happen since we have the cluster instance
+      e.printStackTrace();
+      throw new AmbariException("An unknown error occurred while trying to obtain the cluster kerberos descriptor", e);
+    } catch (NoSuchResourceException e) {
+      // no descriptor registered, use the default from the stack
+    }
+
+    if (response != null && !response.isEmpty()) {
+      // At most one descriptor artifact is expected; only the first resource is used
+      Resource descriptorResource = response.iterator().next();
+      Map<String, Map<String, Object>> propertyMap = descriptorResource.getPropertiesMap();
+      if (propertyMap != null) {
+        Map<String, Object> artifactData = propertyMap.get(ArtifactResourceProvider.ARTIFACT_DATA_PROPERTY);
+        Map<String, Object> artifactDataProperties = propertyMap.get(ArtifactResourceProvider.ARTIFACT_DATA_PROPERTY + "/properties");
+        HashMap<String, Object> data = new HashMap<String, Object>();
+
+        if (artifactData != null) {
+          data.putAll(artifactData);
+        }
+
+        if (artifactDataProperties != null) {
+          data.put("properties", artifactDataProperties);
+        }
+
+        descriptor = kerberosDescriptorFactory.createInstance(data);
+      }
+    }
+    // -------------------------------
+
+    // -------------------------------
+    // Attempt to build and return a composite of the default Kerberos descriptor and the user-supplied
+    // Kerberos descriptor. If the default descriptor exists, overlay the user-supplied Kerberos
+    // descriptor on top of it (if it exists) and return the composite; else return the user-supplied
+    // Kerberos descriptor. If both values are null, null may be returned.
+    if (defaultDescriptor == null) {
+      return descriptor;
+    } else {
+      if (descriptor != null) {
+        defaultDescriptor.update(descriptor);
+      }
+      return defaultDescriptor;
+    }
+    // -------------------------------
+  }
+
+  /**
+   * Overlays descriptor-declared configuration updates onto a map of configurations,
+   * delegating each config type to the per-type {@code mergeConfigurations} overload.
+   * Allocates the configurations map if it was passed in as null and updates exist.
+   *
+   * @param configurations map to merge into (may be null; may be mutated)
+   * @param updates        config-type -> descriptor of property updates to apply
+   * @param replacements   configurations used for variable replacement during merge
+   * @return the (possibly newly allocated) configurations map — callers must use the return value
+   */
+  @Override
+  public Map<String, Map<String, String>> mergeConfigurations(Map<String, Map<String, String>> configurations,
+                                                              Map<String, KerberosConfigurationDescriptor> updates,
+                                                              Map<String, Map<String, String>> replacements)
+      throws AmbariException {
+
+    if ((updates != null) && !updates.isEmpty()) {
+      if (configurations == null) {
+        configurations = new HashMap<String, Map<String, String>>();
+      }
+
+      for (Map.Entry<String, KerberosConfigurationDescriptor> entry : updates.entrySet()) {
+        String type = entry.getKey();
+        KerberosConfigurationDescriptor configurationDescriptor = entry.getValue();
+
+        if (configurationDescriptor != null) {
+          Map<String, String> updatedProperties = configurationDescriptor.getProperties();
+          mergeConfigurations(configurations, type, updatedProperties, replacements);
+        }
+      }
+    }
+
+    return configurations;
+  }
+
+  /**
+   * Writes one record per matching identity to the Kerberos identity data file and records
+   * the identity's principal/keytab configuration mappings into kerberosConfigurations.
+   * Identities are skipped when they fail the identity filter or when variable replacement
+   * yields no principal value. Keytab fields are left null/false when the identity declares
+   * no keytab descriptor.
+   *
+   * @return the number of identities actually written
+   * @throws IOException if writing a record to the data file fails
+   */
+  @Override
+  public int addIdentities(KerberosIdentityDataFileWriter kerberosIdentityDataFileWriter,
+                           Collection<KerberosIdentityDescriptor> identities,
+                           Collection<String> identityFilter, String hostname, String serviceName,
+                           String componentName, Map<String, Map<String, String>> kerberosConfigurations,
+                           Map<String, Map<String, String>> configurations)
+      throws IOException {
+    int identitiesAdded = 0;
+
+    if (identities != null) {
+      for (KerberosIdentityDescriptor identity : identities) {
+        // If there is no filter or the filter contains the current identity's name...
+        if ((identityFilter == null) || identityFilter.contains(identity.getName())) {
+          KerberosPrincipalDescriptor principalDescriptor = identity.getPrincipalDescriptor();
+          String principal = null;
+          String principalType = null;
+          String principalConfiguration = null;
+
+          // Resolve ${...} variables in the principal fields against the effective configurations
+          if (principalDescriptor != null) {
+            principal = variableReplacementHelper.replaceVariables(principalDescriptor.getValue(), configurations);
+            principalType = principalDescriptor.getType().name().toLowerCase();
+            principalConfiguration = variableReplacementHelper.replaceVariables(principalDescriptor.getConfiguration(), configurations);
+          }
+
+          // An identity without a resolvable principal is not written
+          if (principal != null) {
+            KerberosKeytabDescriptor keytabDescriptor = identity.getKeytabDescriptor();
+            String keytabFilePath = null;
+            String keytabFileOwnerName = null;
+            String keytabFileOwnerAccess = null;
+            String keytabFileGroupName = null;
+            String keytabFileGroupAccess = null;
+            String keytabFileConfiguration = null;
+            boolean keytabIsCachable = false;
+
+            if (keytabDescriptor != null) {
+              keytabFilePath = variableReplacementHelper.replaceVariables(keytabDescriptor.getFile(), configurations);
+              keytabFileOwnerName = variableReplacementHelper.replaceVariables(keytabDescriptor.getOwnerName(), configurations);
+              keytabFileOwnerAccess = variableReplacementHelper.replaceVariables(keytabDescriptor.getOwnerAccess(), configurations);
+              keytabFileGroupName = variableReplacementHelper.replaceVariables(keytabDescriptor.getGroupName(), configurations);
+              keytabFileGroupAccess = variableReplacementHelper.replaceVariables(keytabDescriptor.getGroupAccess(), configurations);
+              keytabFileConfiguration = variableReplacementHelper.replaceVariables(keytabDescriptor.getConfiguration(), configurations);
+              keytabIsCachable = keytabDescriptor.isCachable();
+            }
+
+            // Append an entry to the action data file builder...
+            kerberosIdentityDataFileWriter.writeRecord(
+                hostname,
+                serviceName,
+                componentName,
+                principal,
+                principalType,
+                keytabFilePath,
+                keytabFileOwnerName,
+                keytabFileOwnerAccess,
+                keytabFileGroupName,
+                keytabFileGroupAccess,
+                (keytabIsCachable) ? "true" : "false");
+
+            // Add the principal-related configuration to the map of configurations
+            mergeConfiguration(kerberosConfigurations, principalConfiguration, principal, null);
+
+            // Add the keytab-related configuration to the map of configurations
+            mergeConfiguration(kerberosConfigurations, keytabFileConfiguration, keytabFilePath, null);
+
+            identitiesAdded++;
+          }
+        }
+      }
+    }
+
+    return identitiesAdded;
+  }
+
+  /**
+   * Builds the effective, host-specific configuration map used for Kerberos variable
+   * replacement: effective cluster configs (with host overrides) merged with any pre-applied
+   * custom values, plus an un-categorized ("") type holding the descriptor properties and the
+   * current host/cluster names, plus a synthesized "clusterHostInfo" type mapping well-known
+   * component keys to comma-joined host lists.
+   */
+  @Override
+  public Map<String, Map<String, String>> calculateConfigurations(Cluster cluster, String hostname,
+                                                                  Map<String, String> kerberosDescriptorProperties)
+      throws AmbariException {
+    // For a configuration type, both tag and an actual configuration can be stored
+    // Configurations from the tag is always expanded and then over-written by the actual
+    // global:version1:{a1:A1,b1:B1,d1:D1} + global:{a1:A2,c1:C1,DELETED_d1:x} ==>
+    // global:{a1:A2,b1:B1,c1:C1}
+    Map<String, Map<String, String>> configurations = new HashMap<String, Map<String, String>>();
+    Map<String, Map<String, String>> configurationTags = ambariManagementController.findConfigurationTagsWithOverrides(cluster, hostname);
+
+    // Force RCA off in the "global" type if that type is present
+    if (configurationTags.get(Configuration.GLOBAL_CONFIG_TAG) != null) {
+      configHelper.applyCustomConfig(
+          configurations, Configuration.GLOBAL_CONFIG_TAG,
+          Configuration.RCA_ENABLED_PROPERTY, "false", false);
+    }
+
+    Map<String, Map<String, String>> configProperties = configHelper.getEffectiveConfigProperties(cluster, configurationTags);
+
+    // Apply the configurations saved with the Execution Cmd on top of
+    // derived configs - This will take care of all the hacks
+    for (Map.Entry<String, Map<String, String>> entry : configProperties.entrySet()) {
+      String type = entry.getKey();
+      Map<String, String> allLevelMergedConfig = entry.getValue();
+      Map<String, String> configuration = configurations.get(type);
+
+      if (configuration == null) {
+        configuration = new HashMap<String, String>(allLevelMergedConfig);
+      } else {
+        // Values already present (e.g. the forced RCA setting) win over the effective configs
+        Map<String, String> mergedConfig = configHelper.getMergedConfig(allLevelMergedConfig, configuration);
+        configuration.clear();
+        configuration.putAll(mergedConfig);
+      }
+
+      configurations.put(type, configuration);
+    }
+
+    // A map to hold un-categorized properties.  This may come from the KerberosDescriptor
+    // and will also contain a value for the current host
+    Map<String, String> generalProperties = configurations.get("");
+    if (generalProperties == null) {
+      generalProperties = new HashMap<String, String>();
+      configurations.put("", generalProperties);
+    }
+
+    // If any properties are set in the calculated KerberosDescriptor, add them into the
+    // Map of configurations as an un-categorized type (using an empty string)
+    if (kerberosDescriptorProperties != null) {
+      generalProperties.putAll(kerberosDescriptorProperties);
+    }
+
+    // Add the current hostname under "host" and "hostname"
+    generalProperties.put("host", hostname);
+    generalProperties.put("hostname", hostname);
+
+    // Add the current cluster's name
+    generalProperties.put("cluster_name", cluster.getClusterName());
+
+    // add clusterHostInfo config
+    Map<String, String> componentHosts = new HashMap<String, String>();
+    for (Map.Entry<String, Service> service : cluster.getServices().entrySet()) {
+      for (Map.Entry<String, ServiceComponent> serviceComponent : service.getValue().getServiceComponents().entrySet()) {
+        // Only components with a known clusterHostInfo key are included
+        if (StageUtils.getComponentToClusterInfoKeyMap().keySet().contains(serviceComponent.getValue().getName())) {
+          componentHosts.put(StageUtils.getComponentToClusterInfoKeyMap().get(serviceComponent.getValue().getName()),
+              StringUtils.join(serviceComponent.getValue().getServiceComponentHosts().keySet(), ","));
+        }
+      }
+    }
+    configurations.put("clusterHostInfo", componentHosts);
+
+    return configurations;
+  }
+
+  /**
+   * Returns, per host, the active Kerberos identities with all descriptor variables resolved
+   * against that host's effective configurations. Identities are de-duplicated within a host
+   * by the "principal|keytabFile" pair. When hostName is null, every host in the cluster is
+   * processed; otherwise only the named host. Identities lacking a resolvable principal are
+   * omitted. When replaceHostNames is true the literal "_HOST" token in the principal is
+   * replaced by the concrete hostname.
+   *
+   * @throws IllegalArgumentException if clusterName is null or empty
+   * @throws AmbariException          if the cluster cannot be resolved
+   */
+  @Override
+  public Map<String, Collection<KerberosIdentityDescriptor>> getActiveIdentities(String clusterName,
+                                                                                 String hostName,
+                                                                                 String serviceName,
+                                                                                 String componentName,
+                                                                                 boolean replaceHostNames)
+      throws AmbariException {
+
+    if ((clusterName == null) || clusterName.isEmpty()) {
+      throw new IllegalArgumentException("Invalid argument, cluster name is required");
+    }
+
+    Cluster cluster = clusters.getCluster(clusterName);
+
+    if (cluster == null) {
+      throw new AmbariException(String.format("The cluster object for the cluster name %s is not available", clusterName));
+    }
+
+    Map<String, Collection<KerberosIdentityDescriptor>> activeIdentities = new HashMap<String, Collection<KerberosIdentityDescriptor>>();
+
+    Collection<String> hosts;
+
+    // Resolve the set of hosts to process: the single named host, or all cluster hosts
+    if (hostName == null) {
+      Map<String, Host> hostMap = clusters.getHostsForCluster(clusterName);
+      if (hostMap == null) {
+        hosts = null;
+      } else {
+        hosts = hostMap.keySet();
+      }
+    } else {
+      hosts = Collections.singleton(hostName);
+    }
+
+    if ((hosts != null) && !hosts.isEmpty()) {
+      KerberosDescriptor kerberosDescriptor = getKerberosDescriptor(cluster);
+
+      if (kerberosDescriptor != null) {
+        Map<String, String> kerberosDescriptorProperties = kerberosDescriptor.getProperties();
+
+        for (String hostname : hosts) {
+          Map<String, KerberosIdentityDescriptor> hostActiveIdentities = new HashMap<String, KerberosIdentityDescriptor>();
+          List<KerberosIdentityDescriptor> identities = getActiveIdentities(cluster, hostname, serviceName, componentName, kerberosDescriptor);
+
+          if (!identities.isEmpty()) {
+            // Calculate the current host-specific configurations. These will be used to replace
+            // variables within the Kerberos descriptor data
+            Map<String, Map<String, String>> configurations = calculateConfigurations(cluster, hostname, kerberosDescriptorProperties);
+
+            for (KerberosIdentityDescriptor identity : identities) {
+              KerberosPrincipalDescriptor principalDescriptor = identity.getPrincipalDescriptor();
+              String principal = null;
+
+              if (principalDescriptor != null) {
+                principal = variableReplacementHelper.replaceVariables(principalDescriptor.getValue(), configurations);
+              }
+
+              // Identities without a resolvable principal are dropped
+              if (principal != null) {
+                KerberosKeytabDescriptor keytabDescriptor = identity.getKeytabDescriptor();
+                String keytabFile = null;
+
+                if (keytabDescriptor != null) {
+                  keytabFile = variableReplacementHelper.replaceVariables(keytabDescriptor.getFile(), configurations);
+                }
+
+                if (replaceHostNames) {
+                  principal = principal.replace("_HOST", hostname);
+                }
+
+                // De-duplicate identities within the host on the principal|keytab pair
+                String uniqueKey = String.format("%s|%s", principal, (keytabFile == null) ? "" : keytabFile);
+
+                if (!hostActiveIdentities.containsKey(uniqueKey)) {
+                  // Build a fully-resolved copy of the principal descriptor
+                  KerberosPrincipalDescriptor resolvedPrincipalDescriptor =
+                      new KerberosPrincipalDescriptor(principal,
+                          principalDescriptor.getType(),
+                          variableReplacementHelper.replaceVariables(principalDescriptor.getConfiguration(), configurations),
+                          variableReplacementHelper.replaceVariables(principalDescriptor.getLocalUsername(), configurations));
+
+                  KerberosKeytabDescriptor resolvedKeytabDescriptor;
+
+                  if (keytabFile == null) {
+                    resolvedKeytabDescriptor = null;
+                  } else {
+                    resolvedKeytabDescriptor =
+                        new KerberosKeytabDescriptor(
+                            keytabFile,
+                            variableReplacementHelper.replaceVariables(keytabDescriptor.getOwnerName(), configurations),
+                            variableReplacementHelper.replaceVariables(keytabDescriptor.getOwnerAccess(), configurations),
+                            variableReplacementHelper.replaceVariables(keytabDescriptor.getGroupName(), configurations),
+                            variableReplacementHelper.replaceVariables(keytabDescriptor.getGroupAccess(), configurations),
+                            variableReplacementHelper.replaceVariables(keytabDescriptor.getConfiguration(), configurations),
+                            keytabDescriptor.isCachable());
+                  }
+
+                  hostActiveIdentities.put(uniqueKey, new KerberosIdentityDescriptor(
+                      identity.getName(),
+                      resolvedPrincipalDescriptor,
+                      resolvedKeytabDescriptor));
+                }
+              }
+            }
+          }
+
+          activeIdentities.put(hostname, hostActiveIdentities.values());
+        }
+      }
+    }
+
+    return activeIdentities;
+  }
+
+  /**
+   * Validate the KDC admin credentials.
+   *
+   * @param kerberosDetails the KerberosDetails containing information about the Kerberos configuration
+   *                        for the cluster, if null, a new KerberosDetails will be created based on
+   *                        information found in the associated cluster
+   * @param cluster         associated cluster
+   * @throws KerberosMissingAdminCredentialsException if the KDC administrator credentials have not
+   *                                                  been set in the cluster session, or if the set
+   *                                                  credentials fail the administrator test
+   * @throws KerberosAdminAuthenticationException     if the KDC rejects the supplied administrator
+   *                                                  credentials
+   * @throws KerberosInvalidConfigurationException    if the KDC cannot be reached, the realm cannot
+   *                                                  be resolved, or the principal container is not set
+   * @throws AmbariException if any other error occurs while trying to validate the credentials
+   */
+  private void validateKDCCredentials(KerberosDetails kerberosDetails, Cluster cluster) throws KerberosMissingAdminCredentialsException,
+      KerberosAdminAuthenticationException,
+      KerberosInvalidConfigurationException,
+      AmbariException {
+
+    // Lazily build the Kerberos details from the cluster's configuration when the caller did not
+    // supply them.
+    if (kerberosDetails == null) {
+      kerberosDetails = getKerberosDetails(cluster, null);
+    }
+
+    // Credentials only need to be validated when Ambari is managing the Kerberos identities.
+    if (kerberosDetails.manageIdentities()) {
+      String credentials = getEncryptedAdministratorCredentials(cluster);
+      if (credentials == null) {
+        throw new KerberosMissingAdminCredentialsException(
+            "Missing KDC administrator credentials.\n" +
+                "The KDC administrator credentials must be set in session by updating the relevant Cluster resource." +
+                "This may be done by issuing a PUT to the api/v1/clusters/(cluster name) API entry point with the following payload:\n" +
+                "{\n" +
+                "  \"session_attributes\" : {\n" +
+                "    \"kerberos_admin\" : {\"principal\" : \"(PRINCIPAL)\", \"password\" : \"(PASSWORD)\"}\n" +
+                "  }\n" +
+                "}"
+        );
+      } else {
+        KerberosOperationHandler operationHandler = kerberosOperationHandlerFactory.getKerberosOperationHandler(kerberosDetails.getKdcType());
+
+        if (operationHandler == null) {
+          throw new AmbariException("Failed to get an appropriate Kerberos operation handler.");
+        } else {
+          // NOTE(review): the decryption key is derived from Cluster#hashCode(), which is only
+          // stable within a single JVM run — confirm the credentials are intended to be
+          // session-scoped and never persisted with this key.
+          byte[] key = Integer.toHexString(cluster.hashCode()).getBytes();
+          KerberosCredential kerberosCredentials = KerberosCredential.decrypt(credentials, key);
+
+          boolean missingCredentials = false;
+          try {
+            operationHandler.open(kerberosCredentials, kerberosDetails.getDefaultRealm(), kerberosDetails.getKerberosEnvProperties());
+            // todo: this is really odd that open doesn't throw an exception if the credentials are missing
+            missingCredentials = !operationHandler.testAdministratorCredentials();
+          } catch (KerberosAdminAuthenticationException e) {
+            // Re-wrap to attach remediation instructions for the API user.
+            throw new KerberosAdminAuthenticationException(
+                "Invalid KDC administrator credentials.\n" +
+                    "The KDC administrator credentials must be set in session by updating the relevant Cluster resource." +
+                    "This may be done by issuing a PUT to the api/v1/clusters/(cluster name) API entry point with the following payload:\n" +
+                    "{\n" +
+                    "  \"session_attributes\" : {\n" +
+                    "    \"kerberos_admin\" : {\"principal\" : \"(PRINCIPAL)\", \"password\" : \"(PASSWORD)\"}\n" +
+                    "  }\n" +
+                    "}", e);
+          } catch (KerberosKDCConnectionException e) {
+            throw new KerberosInvalidConfigurationException(
+                "Failed to connect to KDC - " + e.getMessage() + "\n" +
+                    "Update the KDC settings in krb5-conf and kerberos-env configurations to correct this issue.",
+                e);
+          } catch (KerberosRealmException e) {
+            throw new KerberosInvalidConfigurationException(
+                "Failed to find a KDC for the specified realm - " + e.getMessage() + "\n" +
+                    "Update the KDC settings in krb5-conf and kerberos-env configurations to correct this issue.",
+                e);
+          } catch (KerberosLDAPContainerException e) {
+            throw new KerberosInvalidConfigurationException(
+                "The principal container was not specified\n" +
+                    "Set the 'container_dn' value in the kerberos-env configuration to correct this issue.",
+                e);
+          } catch (KerberosOperationException e) {
+            throw new AmbariException(e.getMessage(), e);
+          } finally {
+            try {
+              operationHandler.close();
+            } catch (KerberosOperationException e) {
+              // Ignore this...
+            }
+          }
+
+          // need to throw this outside of the try/catch so it isn't caught
+          // NOTE(review): the message reads "Invalid" while the exception type indicates missing
+          // credentials — consider aligning the wording with the condition being reported.
+          if (missingCredentials) {
+            throw new KerberosMissingAdminCredentialsException(
+                "Invalid KDC administrator credentials.\n" +
+                    "The KDC administrator credentials must be set in session by updating the relevant Cluster resource." +
+                    "This may be done by issuing a PUT to the api/v1/clusters/(cluster name) API entry point with the following payload:\n" +
+                    "{\n" +
+                    "  \"session_attributes\" : {\n" +
+                    "    \"kerberos_admin\" : {\"principal\" : \"(PRINCIPAL)\", \"password\" : \"(PASSWORD)\"}\n" +
+                    "  }\n" +
+                    "}"
+            );
+          }
+        }
+      }
+    }
+  }
+
+  /**
+   * Performs operations needed to process Kerberos related tasks on the relevant cluster.
+   * <p/>
+   * Iterates through the components installed on the relevant cluster to determine if work
+   * need to be done.  Calls into the Handler implementation to provide guidance and set up stages
+   * to perform the work needed to complete the relative action.
+   *
+   * @param cluster                        the relevant Cluster
+   * @param kerberosDetails                a KerberosDetails containing information about relevant Kerberos configuration
+   * @param serviceComponentFilter         a Map of service names to component names indicating the relevant
+   *                                       set of services and components - if null, no filter is relevant;
+   *                                       if empty, the filter indicates no relevant services or components
+   * @param identityFilter                 a Collection of identity names indicating the relevant identities -
+   *                                       if null, no filter is relevant; if empty, the filter indicates no
+   *                                       relevant identities
+   * @param hostsToForceKerberosOperations a set of host names on which it is expected that the
+   *                                       Kerberos client is or will be in the INSTALLED state by
+   *                                       the time the operations targeted for them are to be
+   *                                       executed - if empty or null, this no hosts will be
+   *                                       "forced"
+   * @param requestStageContainer          a RequestStageContainer to place generated stages, if needed -
+   *                                       if null a new RequestStageContainer will be created.
+   * @param handler                        a Handler to use to provide guidance and set up stages
+   *                                       to perform the work needed to complete the relative action
+   * @return the updated or a new RequestStageContainer containing the stages that need to be
+   * executed to complete this task; or null if no stages need to be executed.
+   * @throws AmbariException
+   * @throws KerberosOperationException            if validating the KDC administrator credentials fails
+   * @throws KerberosInvalidConfigurationException if an issue occurs trying to get the
+   *                                               Kerberos-specific configuration details
+   */
+  // NOTE(review): @Transactional on a private method is typically NOT intercepted by
+  // proxy/interceptor-based transaction managers (e.g. Guice persist) — verify this annotation
+  // actually takes effect here.
+  @Transactional
+  private RequestStageContainer handle(Cluster cluster,
+                                       KerberosDetails kerberosDetails,
+                                       Map<String, ? extends Collection<String>> serviceComponentFilter,
+                                       Collection<String> identityFilter,
+                                       Set<String> hostsToForceKerberosOperations,
+                                       RequestStageContainer requestStageContainer,
+                                       final Handler handler)
+      throws AmbariException, KerberosOperationException {
+
+    final KerberosDescriptor kerberosDescriptor = getKerberosDescriptor(cluster);
+    final SecurityState desiredSecurityState = handler.getNewServiceSecurityState();
+
+    // Gather the ServiceComponentHosts the handler decides should be processed for the desired
+    // security state.
+    List<ServiceComponentHost> schToProcess = getServiceComponentHostsToProcess(
+        cluster,
+        kerberosDescriptor,
+        serviceComponentFilter,
+        identityFilter,
+        new Command<Boolean, ServiceComponentHost>() {
+          @Override
+          public Boolean invoke(ServiceComponentHost arg) throws AmbariException {
+            return handler.shouldProcess(desiredSecurityState, arg);
+          }
+        });
+
+
+    // While iterating over all the ServiceComponentHosts find hosts that have KERBEROS_CLIENT
+    // components in the INSTALLED state and add them to the hostsWithValidKerberosClient Set.
+    // This is needed to help determine which hosts to perform actions for and create tasks for.
+    // NOTE(review): this remains null when there are no ServiceComponentHosts to process —
+    // handler.createStages implementations must tolerate a null collection; confirm.
+    Set<String> hostsWithValidKerberosClient = null;
+
+    // Create a temporary directory to store metadata needed to complete this task.  Information
+    // such as which principals and keytabs files to create as well as what configurations need
+    // to be update are stored in data files in this directory. Any keytab files are stored in
+    // this directory until they are distributed to their appropriate hosts.
+    File dataDirectory = null;
+
+    // If there are ServiceComponentHosts to process...
+    if (!schToProcess.isEmpty()) {
+
+      validateKDCCredentials(kerberosDetails, cluster);
+
+      // Create a temporary directory to store metadata needed to complete this task.  Information
+      // such as which principals and keytabs files to create as well as what configurations need
+      // to be update are stored in data files in this directory. Any keytab files are stored in
+      // this directory until they are distributed to their appropriate hosts.
+      dataDirectory = createTemporaryDirectory();
+
+      hostsWithValidKerberosClient = getHostsWithValidKerberosClient(cluster);
+
+      // Ensure that that hosts that should be assumed to be in the correct state when needed are
+      // in the hostsWithValidKerberosClient collection.
+      if (hostsToForceKerberosOperations != null) {
+        hostsWithValidKerberosClient.addAll(hostsToForceKerberosOperations);
+      }
+    }
+
+    // Always set up the necessary stages to perform the tasks needed to complete the operation.
+    // Some stages may be no-ops, this is expected.
+    // Gather data needed to create stages and tasks...
+    Map<String, Set<String>> clusterHostInfo = StageUtils.getClusterHostInfo(cluster);
+    String clusterHostInfoJson = StageUtils.getGson().toJson(clusterHostInfo);
+    Map<String, String> hostParams = customCommandExecutionHelper.createDefaultHostParams(cluster);
+    String hostParamsJson = StageUtils.getGson().toJson(hostParams);
+    String ambariServerHostname = StageUtils.getHostName();
+    ServiceComponentHostServerActionEvent event = new ServiceComponentHostServerActionEvent(
+        "AMBARI_SERVER",
+        ambariServerHostname, // TODO: Choose a random hostname from the cluster. All tasks for the AMBARI_SERVER service will be executed on this Ambari server
+        System.currentTimeMillis());
+    RoleCommandOrder roleCommandOrder = ambariManagementController.getRoleCommandOrder(cluster);
+
+    // If a RequestStageContainer does not already exist, create a new one...
+    if (requestStageContainer == null) {
+      requestStageContainer = new RequestStageContainer(
+          actionManager.getNextRequestId(),
+          null,
+          requestFactory,
+          actionManager);
+    }
+
+    // Use the handler implementation to setup the relevant stages.
+    handler.createStages(cluster, clusterHostInfoJson,
+        hostParamsJson, event, roleCommandOrder, kerberosDetails, dataDirectory,
+        requestStageContainer, schToProcess, serviceComponentFilter, identityFilter,
+        hostsWithValidKerberosClient);
+
+    // Add the finalize stage...
+    handler.addFinalizeOperationStage(cluster, clusterHostInfoJson, hostParamsJson, event,
+        dataDirectory, roleCommandOrder, requestStageContainer);
+
+    // If all goes well, set the appropriate states on the relevant ServiceComponentHosts
+    for (ServiceComponentHost sch : schToProcess) {
+      // Update the desired and current states for the ServiceComponentHost
+      // using new state information from the the handler implementation
+      SecurityState newSecurityState;
+
+      newSecurityState = handler.getNewDesiredSCHSecurityState();
+      if (newSecurityState != null) {
+        sch.setDesiredSecurityState(newSecurityState);
+      }
+
+      newSecurityState = handler.getNewSCHSecurityState();
+      if (newSecurityState != null) {
+        sch.setSecurityState(newSecurityState);
+      }
+    }
+
+    // If all goes well, set all services to _desire_ to be secured or unsecured, depending on handler
+    if (desiredSecurityState != null) {
+      Map<String, Service> services = cluster.getServices();
+
+      for (Service service : services.values()) {
+        if ((serviceComponentFilter == null) || serviceComponentFilter.containsKey(service.getName())) {
+          service.setSecurityState(desiredSecurityState);
+        }
+      }
+    }
+
+    return requestStageContainer;
+  }
+
+  /**
+   * Counts the identities from the supplied list that pass the (optional) identity filter.
+   *
+   * @param identities     the List of KerberosIdentityDescriptors to count, may be null or empty
+   * @param identityFilter a Collection of identity names indicating the relevant identities -
+   *                       if null, every identity counts; if empty, none do
+   * @return the number of identities whose names are accepted by the filter
+   */
+  private int filteredIdentitiesCount(List<KerberosIdentityDescriptor> identities, Collection<String> identityFilter) {
+
+    if ((identities == null) || identities.isEmpty()) {
+      return 0;
+    }
+
+    // No filter means every identity is relevant.
+    if (identityFilter == null) {
+      return identities.size();
+    }
+
+    int matches = 0;
+    for (KerberosIdentityDescriptor descriptor : identities) {
+      if (identityFilter.contains(descriptor.getName())) {
+        matches++;
+      }
+    }
+
+    return matches;
+  }
+
+
+  /**
+   * Performs operations needed to process Kerberos related tasks to manage a (unique) test identity
+   * on the relevant cluster.
+   * <p/>
+   * If Ambari is not managing Kerberos identities, than this method does nothing.
+   *
+   * @param cluster               the relevant Cluster
+   * @param kerberosDetails       a KerberosDetails containing information about relevant Kerberos
+   *                              configuration
+   * @param commandParameters     the command parameters map used to read and/or write attributes
+   *                              related to this operation
+   * @param requestStageContainer a RequestStageContainer to place generated stages, if needed -
+   *                              if null a new RequestStageContainer will be created.
+   * @param handler               a Handler to use to provide guidance and set up stages
+   *                              to perform the work needed to complete the relative action
+   * @return the updated or a new RequestStageContainer containing the stages that need to be
+   * executed to complete this task; or null if no stages need to be executed.
+   * @throws AmbariException            if the command parameters map is null or an error occurs
+   *                                    while processing the cluster's hosts and components
+   * @throws KerberosOperationException if the KDC administrator credentials fail validation
+   */
+  private RequestStageContainer handleTestIdentity(Cluster cluster,
+                                                   KerberosDetails kerberosDetails,
+                                                   Map<String, String> commandParameters, RequestStageContainer requestStageContainer,
+                                                   Handler handler) throws AmbariException, KerberosOperationException {
+
+    if (kerberosDetails.manageIdentities()) {
+      if (commandParameters == null) {
+        throw new AmbariException("The properties map must not be null.  It is needed to store data related to the service check identity");
+      }
+
+      Map<String, Service> services = cluster.getServices();
+
+      if ((services != null) && !services.isEmpty()) {
+        String clusterName = cluster.getClusterName();
+        Map<String, Host> hosts = clusters.getHostsForCluster(clusterName);
+
+        if ((hosts != null) && !hosts.isEmpty()) {
+          List<ServiceComponentHost> serviceComponentHostsToProcess = new ArrayList<ServiceComponentHost>();
+          KerberosDescriptor kerberosDescriptor = getKerberosDescriptor(cluster);
+          KerberosIdentityDataFileWriter kerberosIdentityDataFileWriter = null;
+          Map<String, String> kerberosDescriptorProperties = kerberosDescriptor.getProperties();
+
+          // This is needed to help determine which hosts to perform actions for and create tasks for.
+          Set<String> hostsWithValidKerberosClient = getHostsWithValidKerberosClient(cluster);
+
+          // Create a temporary directory to store metadata needed to complete this task.  Information
+          // such as which principals and keytabs files to create as well as what configurations need
+          // to be update are stored in data files in this directory. Any keytab files are stored in
+          // this directory until they are distributed to their appropriate hosts.
+          File dataDirectory = createTemporaryDirectory();
+
+          // Create the file used to store details about principals and keytabs to create
+          File identityDataFile = new File(dataDirectory, KerberosIdentityDataFileWriter.DATA_FILE_NAME);
+
+          // Create a special identity for the test user
+          KerberosIdentityDescriptor identity = new KerberosIdentityDescriptor(new HashMap<String, Object>() {
+            {
+              put("principal",
+                  new HashMap<String, Object>() {
+                    {
+                      put("value", "${cluster-env/smokeuser}_${service_check_id}@${realm}");
+                      put("type", "user");
+                    }
+                  });
+              put("keytab",
+                  new HashMap<String, Object>() {
+                    {
+                      put("file", "${keytab_dir}/kerberos.service_check.${service_check_id}.keytab");
+
+                      put("owner", new HashMap<String, Object>() {{
+                        put("name", "${cluster-env/smokeuser}");
+                        put("access", "rw");
+                      }});
+
+                      put("group", new HashMap<String, Object>() {{
+                        put("name", "${cluster-env/user_group}");
+                        put("access", "r");
+                      }});
+
+                      put("cachable", "false");
+                    }
+                  });
+            }
+          });
+
+          // Get or create the unique service check identifier
+          String serviceCheckId = getKerberosServiceCheckIdentifier(cluster, true);
+
+          try {
+            // Iterate over the hosts in the cluster to find the components installed in each.  For each
+            // component (aka service component host - sch) determine the configuration updates and
+            // and the principals an keytabs to create.
+            for (Host host : hosts.values()) {
+              String hostname = host.getHostName();
+
+              // Get a list of components on the current host
+              List<ServiceComponentHost> serviceComponentHosts = cluster.getServiceComponentHosts(hostname);
+
+              if ((serviceComponentHosts != null) && !serviceComponentHosts.isEmpty()) {
+                // Calculate the current host-specific configurations. These will be used to replace
+                // variables within the Kerberos descriptor data
+                Map<String, Map<String, String>> configurations = calculateConfigurations(cluster, hostname, kerberosDescriptorProperties);
+
+                // Set the unique service check identifier
+                // NOTE(review): assumes calculateConfigurations always seeds a "" (global) entry —
+                // a missing entry would cause an NPE here; confirm.
+                configurations.get("").put("service_check_id", serviceCheckId);
+
+                // Iterate over the components installed on the current host to get the service and
+                // component-level Kerberos descriptors in order to determine which principals,
+                // keytab files, and configurations need to be created or updated.
+                for (ServiceComponentHost sch : serviceComponentHosts) {
+                  String serviceName = sch.getServiceName();
+                  String componentName = sch.getServiceComponentName();
+
+                  // If the current ServiceComponentHost represents the KERBEROS/KERBEROS_CLIENT and
+                  // indicates that the KERBEROS_CLIENT component is in the INSTALLED state, add the
+                  // current host to the set of hosts that should be handled...
+                  if (Service.Type.KERBEROS.name().equals(serviceName) &&
+                      Role.KERBEROS_CLIENT.name().equals(componentName) &&
+                      (sch.getState() == State.INSTALLED)) {
+                    hostsWithValidKerberosClient.add(hostname);
+
+                    int identitiesAdded = 0;
+
+                    // Lazily create the KerberosIdentityDataFileWriter instance...
+                    if (kerberosIdentityDataFileWriter == null) {
+                      kerberosIdentityDataFileWriter = kerberosIdentityDataFileWriterFactory.createKerberosIdentityDataFileWriter(identityDataFile);
+                    }
+
+                    // Add service-level principals (and keytabs)
+                    identitiesAdded += addIdentities(kerberosIdentityDataFileWriter, Collections.singleton(identity),
+                        null, hostname, serviceName, componentName, null, configurations);
+
+                    if (identitiesAdded > 0) {
+                      // Add the relevant principal name and keytab file data to the command params state
+                      if (!commandParameters.containsKey("principal_name") || !commandParameters.containsKey("keytab_file")) {
+                        commandParameters.put("principal_name",
+                            variableReplacementHelper.replaceVariables(identity.getPrincipalDescriptor().getValue(), configurations));
+                        commandParameters.put("keytab_file",
+                            variableReplacementHelper.replaceVariables(identity.getKeytabDescriptor().getFile(), configurations));
+                      }
+
+                      serviceComponentHostsToProcess.add(sch);
+                    }
+                  }
+                }
+              }
+            }
+          } catch (IOException e) {
+            // NOTE(review): dataDirectory is not deleted on this failure path (it is cleaned up on
+            // the KDC-validation failure path below) — possible temp-directory leak.
+            String message = String.format("Failed to write index file - %s", identityDataFile.getAbsolutePath());
+            LOG.error(message);
+            throw new AmbariException(message, e);
+          } finally {
+            if (kerberosIdentityDataFileWriter != null) {
+              // Make sure the data file is closed
+              try {
+                kerberosIdentityDataFileWriter.close();
+              } catch (IOException e) {
+                LOG.warn("Failed to close the index file writer", e);
+              }
+            }
+          }
+
+          // If there are ServiceComponentHosts to process, make sure the administrator credentials
+          // are available
+          if (!serviceComponentHostsToProcess.isEmpty()) {
+            try {
+              validateKDCCredentials(kerberosDetails, cluster);
+            } catch (KerberosOperationException e) {
+              // Clean up the temporary data directory before propagating the failure.
+              try {
+                FileUtils.deleteDirectory(dataDirectory);
+              } catch (Throwable t) {
+                LOG.warn(String.format("The data directory (%s) was not deleted due to an error condition - {%s}",
+                    dataDirectory.getAbsolutePath(), t.getMessage()), t);
+              }
+
+              throw e;
+            }
+          }
+
+          // Always set up the necessary stages to perform the tasks needed to complete the operation.
+          // Some stages may be no-ops, this is expected.
+          // Gather data needed to create stages and tasks...
+          Map<String, Set<String>> clusterHostInfo = StageUtils.getClusterHostInfo(cluster);
+          String clusterHostInfoJson = StageUtils.getGson().toJson(clusterHostInfo);
+          Map<String, String> hostParams = customCommandExecutionHelper.createDefaultHostParams(cluster);
+          String hostParamsJson = StageUtils.getGson().toJson(hostParams);
+          String ambariServerHostname = StageUtils.getHostName();
+          ServiceComponentHostServerActionEvent event = new ServiceComponentHostServerActionEvent(
+              "AMBARI_SERVER",
+              ambariServerHostname, // TODO: Choose a random hostname from the cluster. All tasks for the AMBARI_SERVER service will be executed on this Ambari server
+              System.currentTimeMillis());
+          RoleCommandOrder roleCommandOrder = ambariManagementController.getRoleCommandOrder(cluster);
+
+          // If a RequestStageContainer does not already exist, create a new one...
+          if (requestStageContainer == null) {
+            requestStageContainer = new RequestStageContainer(
+                actionManager.getNextRequestId(),
+                null,
+                requestFactory,
+                actionManager);
+          }
+
+          // Use the handler implementation to setup the relevant stages.
+          // Set the service/component filter to an empty map since the service/component processing
+          // was done above.
+          handler.createStages(cluster,
+              clusterHostInfoJson, hostParamsJson, event, roleCommandOrder, kerberosDetails,
+              dataDirectory, requestStageContainer, serviceComponentHostsToProcess,
+              Collections.<String, Collection<String>>emptyMap(), null, hostsWithValidKerberosClient);
+
+
+          handler.addFinalizeOperationStage(cluster, clusterHostInfoJson, hostParamsJson, event,
+              dataDirectory, roleCommandOrder, requestStageContainer);
+        }
+      }
+    }
+
+    return requestStageContainer;
+  }
+
+
+  /**
+   * Gathers the Kerberos-related data from configurations and stores it in a new KerberosDetails
+   * instance.
+   *
+   * @param cluster          the relevant Cluster
+   * @param manageIdentities a Boolean value indicating how to override the configured behavior
+   *                         of managing Kerberos identities; if null the configured behavior
+   *                         will not be overridden
+   * @return a new KerberosDetails with the collected configuration data
+   * @throws KerberosInvalidConfigurationException if identities are managed but
+   *                                               'kerberos-env/kdc_type' is not set
+   * @throws AmbariException if the cluster or a required configuration is not available, or the
+   *                         kdc_type value cannot be translated
+   */
+  private KerberosDetails getKerberosDetails(Cluster cluster, Boolean manageIdentities)
+      throws KerberosInvalidConfigurationException, AmbariException {
+
+    KerberosDetails kerberosDetails = new KerberosDetails();
+
+    if (cluster == null) {
+      String message = "The cluster object is not available";
+      LOG.error(message);
+      throw new AmbariException(message);
+    }
+
+    Config configKrb5Conf = cluster.getDesiredConfigByType("krb5-conf");
+    if (configKrb5Conf == null) {
+      String message = "The 'krb5-conf' configuration is not available";
+      LOG.error(message);
+      throw new AmbariException(message);
+    }
+
+    Map<String, String> krb5ConfProperties = configKrb5Conf.getProperties();
+    if (krb5ConfProperties == null) {
+      String message = "The 'krb5-conf' configuration properties are not available";
+      LOG.error(message);
+      throw new AmbariException(message);
+    }
+
+    Config configKerberosEnv = cluster.getDesiredConfigByType("kerberos-env");
+    if (configKerberosEnv == null) {
+      String message = "The 'kerberos-env' configuration is not available";
+      LOG.error(message);
+      throw new AmbariException(message);
+    }
+
+    Map<String, String> kerberosEnvProperties = configKerberosEnv.getProperties();
+    if (kerberosEnvProperties == null) {
+      String message = "The 'kerberos-env' configuration properties are not available";
+      LOG.error(message);
+      throw new AmbariException(message);
+    }
+
+    kerberosDetails.setSecurityType(cluster.getSecurityType());
+    kerberosDetails.setDefaultRealm(kerberosEnvProperties.get("realm"));
+
+    kerberosDetails.setKerberosEnvProperties(kerberosEnvProperties);
+
+    // If set, override the manage identities behavior
+    kerberosDetails.setManageIdentities(manageIdentities);
+
+    // kdc_type is only mandatory when Ambari manages the identities; otherwise it may be absent.
+    String kdcTypeProperty = kerberosEnvProperties.get("kdc_type");
+    if ((kdcTypeProperty == null) && kerberosDetails.manageIdentities()) {
+      String message = "The 'kerberos-env/kdc_type' value must be set to a valid KDC type";
+      LOG.error(message);
+      throw new KerberosInvalidConfigurationException(message);
+    }
+
+    // NOTE(review): the null-fallback below implies KDCType.translate may return null for an
+    // unset value — verify translate(null) does not throw.
+    KDCType kdcType;
+    try {
+      kdcType = KDCType.translate(kdcTypeProperty);
+    } catch (IllegalArgumentException e) {
+      String message = String.format("Invalid 'kdc_type' value: %s", kdcTypeProperty);
+      LOG.error(message);
+      throw new AmbariException(message);
+    }
+
+    // Set the KDCType to MIT_KDC as a fallback.
+    kerberosDetails.setKdcType((kdcType == null) ? KDCType.MIT_KDC : kdcType);
+
+    return kerberosDetails;
+  }
+
+  /**
+   * Creates a temporary directory within the system temporary directory
+   * <p/>
+   * The resulting directory is to be removed by the caller when desired.
+   *
+   * @return a File pointing to the new temporary directory, or null if one was not created
+   * @throws AmbariException if a new temporary directory cannot be created
+   */
+  private File createTemporaryDirectory() throws AmbariException {
+    // Prefer the configured server temp directory; fall back to the JVM default.
+    String tempRoot = configuration.getProperty(Configuration.SERVER_TMP_DIR_KEY);
+    if ((tempRoot == null) || tempRoot.isEmpty()) {
+      tempRoot = System.getProperty("java.io.tmpdir");
+    }
+
+    try {
+      if (tempRoot == null) {
+        throw new IOException("The System property 'java.io.tmpdir' does not specify a temporary directory");
+      }
+
+      long timestamp = System.currentTimeMillis();
+
+      // Try up to 100 candidate names (timestamp plus attempt counter) until one can be created.
+      for (int attempt = 0; attempt < 100; attempt++) {
+        File candidate = new File(tempRoot, String.format("%s%d-%d.d",
+            KerberosServerAction.DATA_DIRECTORY_PREFIX, timestamp, attempt));
+
+        if (!candidate.exists() && candidate.mkdirs()) {
+          LOG.debug("Created temporary directory: {}", candidate.getAbsolutePath());
+          return candidate;
+        }
+      }
+
+      throw new IOException(String.format("Failed to create a temporary directory in %s", tempRoot));
+    } catch (IOException e) {
+      String message = "Failed to create the temporary data directory.";
+      LOG.error(message, e);
+      throw new AmbariException(message, e);
+    }
+  }
+
+
+  /**
+   * Merges a single configuration property into a map of configuration types.
+   * The supplied value is processed to replace variables using the replacement Map.
+   * <p/>
+   * See {@link VariableReplacementHelper#replaceVariables(String, java.util.Map)}
+   * for information on variable replacement.
+   *
+   * @param configurations             the Map of configuration types to update
+   * @param configurationSpecification the config-type/property_name value specifying the property to set;
+   *                                   ignored if null or not in the two-part "type/property" form
+   * @param value                      the value of the property to set
+   * @param replacements               a Map of (grouped) replacement values
+   * @throws AmbariException
+   */
+  private void mergeConfiguration(Map<String, Map<String, String>> configurations,
+                                  String configurationSpecification,
+                                  String value,
+                                  Map<String, Map<String, String>> replacements) throws AmbariException {
+
+    if (configurationSpecification == null) {
+      return;
+    }
+
+    // Expect exactly "config-type/property_name"; silently skip anything else.
+    String[] parts = configurationSpecification.split("/");
+    if (parts.length == 2) {
+      mergeConfigurations(configurations, parts[0], Collections.singletonMap(parts[1], value), replacements);
+    }
+  }
+
+  /**
+   * Merges configuration from a Map of configuration updates into a main configurations Map.  Each
+   * property in the updates Map is processed to replace variables using the replacement Map.
+   * <p/>
+   * See {@link VariableReplacementHelper#replaceVariables(String, java.util.Map)}
+   * for information on variable replacement.
+   *
+   * @param configurations a Map of configurations
+   * @param type           the configuration type
+   * @param updates        a Map of property updates; if null, nothing is merged
+   * @param replacements   a Map of (grouped) replacement values
+   * @throws AmbariException
+   */
+  private void mergeConfigurations(Map<String, Map<String, String>> configurations, String type,
+                                   Map<String, String> updates,
+                                   Map<String, Map<String, String>> replacements) throws AmbariException {
+    if (updates == null) {
+      return;
+    }
+
+    // Look up (or lazily create) the property map for the requested configuration type.
+    Map<String, String> target = configurations.get(type);
+    if (target == null) {
+      target = new HashMap<String, String>();
+      configurations.put(type, target);
+    }
+
+    // Resolve variables in both the key and the value before storing each update.
+    for (Map.Entry<String, String> update : updates.entrySet()) {
+      String resolvedKey = variableReplacementHelper.replaceVariables(update.getKey(), replacements);
+      String resolvedValue = variableReplacementHelper.replaceVariables(update.getValue(), replacements);
+      target.put(resolvedKey, resolvedValue);
+    }
+  }
+
+  /**
+   * Adds identities to the AuthToLocalBuilder.
+   *
+   * @param authToLocalBuilder the AuthToLocalBuilder to use to build the auth_to_local mapping
+   * @param identities         a List of KerberosIdentityDescriptors to process
+   * @param identityFilter     a Collection of identity names indicating the relevant identities -
+   *                           if null, no filter is relevant; if empty, the filter indicates no
+   *                           relevant identities
+   * @param configurations     a Map of configurations to use as replacements for variables
+   *                           in identity fields
+   * @throws org.apache.ambari.server.AmbariException
+   */
+  private void addIdentities(AuthToLocalBuilder authToLocalBuilder,
+                             List<KerberosIdentityDescriptor> identities, Collection<String> identityFilter,
+                             Map<String, Map<String, String>> configurations) throws AmbariException {
+    if (identities != null) {
+      for (KerberosIdentityDescriptor identity : identities) {
+        // If there is no filter or the filter contains the current identity's name...
+        if ((identityFilter == null) || identityFilter.contains(identity.getName())) {
+          KerberosPrincipalDescriptor principalDescriptor = identity.getPrincipalDescriptor();
+          if (principalDescriptor != null) {
+            authToLocalBuilder.addRule(
+                variableReplacementHelper.replaceVariables(principalDescriptor.getValue(), configurations),
+                variableReplacementHelper.replaceVariables(principalDescriptor.getLocalUsername(), configurations));
+          }
+        }
+      }
+    }
+  }
+
+
+  /**
+   * Creates a new stage
+   *
+   * @param id              the new stage's id
+   * @param cluster         the relevant Cluster
+   * @param requestId       the relevant request Id
+   * @param requestContext  a String describing the stage
+   * @param clusterHostInfo JSON-encoded clusterHostInfo structure
+   * @param commandParams   JSON-encoded command parameters
+   * @param hostParams      JSON-encoded host parameters
+   * @return a newly created Stage
+   */
+  private Stage createNewStage(long id, Cluster cluster, long requestId,
+                               String requestContext, String clusterHostInfo,
+                               String commandParams, String hostParams) {
+    Stage stage = stageFactory.createNew(requestId,
+        BASE_LOG_DIR + File.pathSeparator + requestId,
+        cluster.getClusterName(),
+        cluster.getClusterId(),
+        requestContext,
+        clusterHostInfo,
+        commandParams,
+        hostParams);
+
+    stage.setStageId(id);
+    return stage;
+  }
+
+  /**
+   * Creates a new stage with a single task describing the ServerAction class to invoke and the other
+   * task-related information.
+   *
+   * @param id                the new stage's id
+   * @param cluster           the relevant Cluster
+   * @param requestId         the relevant request Id
+   * @param requestContext    a String describing the stage
+   * @param clusterHostInfo   JSON-encoded clusterHostInfo structure
+   * @param commandParams     JSON-encoded command parameters
+   * @param hostParams        JSON-encoded host parameters
+   * @param actionClass       The ServeAction class that implements the action to invoke
+   * @param event             The relevant ServiceComponentHostServerActionEvent
+   * @param commandParameters a Map of command parameters to attach to the task added to the new
+   *                          stage
+   * @param commandDetail     a String declaring a descriptive name to pass to the action - null or an
+   *                          empty string indicates no value is to be set
+   * @param timeout           the timeout for the task/action
+   * @return a newly created Stage
+   */
+  private Stage createServerActionStage(long id, Cluster cluster, long requestId,
+                                        String requestContext, String clusterHostInfo,
+                                        String commandParams, String hostParams,
+                                        Class<? extends ServerAction> actionClass,
+                                        ServiceComponentHostServerActionEvent event,
+                                        Map<String, String> commandParameters, String commandDetail,
+                                        Integer timeout) throws AmbariException {
+
+    Stage stage = createNewStage(id, cluster, requestId, requestContext, clusterHostInfo, commandParams, hostParams);
+    stage.addServerActionCommand(actionClass.getName(),
+        Role.AMBARI_SERVER_ACTION,
+        RoleCommand.EXECUTE,
+        cluster.getClusterName(),
+        event,
+        commandParameters,
+        commandDetail,
+        ambariManagementController.findConfigurationTagsWithOverrides(cluster, null),
+        timeout, false);
+
+    return stage;
+  }
+
+  /**
+   * Using the session data from the relevant Cluster object, creates a KerberosCredential,
+   * serializes, and then encrypts it.
+   * <p/>
+   * Since the relevant data is stored in the HTTPSession (which relies on ThreadLocalStorage),
+   * it needs to be retrieved now and placed in the action's command parameters so it will be
+   * available when needed.  Because command parameters are stored in plaintext in the Ambari database,
+   * this (sensitive) data needs to be encrypted, however it needs to be done using a key that can be
+   * recreated sometime later when the data needs to be accessed. Since it is expected that the Cluster
+   * object will be available now and later, the hashcode of this object is used to build the key - it
+   * is expected that the same instance will be retrieved from the Clusters instance, thus yielding
+   * the same hashcode value.
+   * <p/>
+   * If the Ambari server architecture changes, this will need to be revisited.
+   *
+   * @param cluster the relevant Cluster
+   * @return a serialized and encrypted KerberosCredential, or null if administrator data is not found
+   * @throws AmbariException
+   */
+  private String getEncryptedAdministratorCredentials(Cluster cluster) throws AmbariException {
+    String encryptedAdministratorCredentials = null;
+
+    Map<String, Object> sessionAttributes = cluster.getSessionAttributes();
+    if (sessionAttributes != null) {
+      KerberosCredential credential = KerberosCredential.fromMap(sessionAttributes, "kerberos_admin/");
+      if (credential != null) {
+        byte[] key = Integer.toHexString(cluster.hashCode()).getBytes();
+        encryptedAdministratorCredentials = credential.encrypt(key);
+      }
+    }
+
+    return encryptedAdministratorCredentials;
+  }
+
+  /**
+   * Using the session data from the relevant Cluster object, gets the previously stored
+   * Kerberos service check identifier value or creates a new one if indicated to do so.
+   * <p/>
+   * This value is intended to be used by the KerberosHelper to manage uniquely created
+   * principals for use in service checks.
+   *
+   * @param cluster      the relevant Cluster
+   * @param createIfNull true to create and store a new identifier if one is not already stored
+   * @return the previously stored Kerberos service check identifier value, or null if
+   * not previously stored and createIfNull is false
+   */
+  private String getKerberosServiceCheckIdentifier(Cluster cluster, boolean createIfNull) {
+    Map<String, Object> sessionAttributes = cluster.getSessionAttributes();
+    Object value = (sessionAttributes == null) ? null : sessionAttributes.get(SERVICE_CHECK_IDENTIFIER);
+    String serviceCheckIdentifier = (value instanceof String) ? (String) value : null;
+
+    if ((serviceCheckIdentifier == null) && createIfNull) {
+      // Create a new (ideally) unique(ish) identifier
+      Random random = new Random(System.currentTimeMillis());
+      char[] chars = new char[8];
+
+      for (int i = 0; i < 8; i++) {
+        chars[i] = (char) ((int) 'a' + random.nextInt(26));
+      }
+
+      serviceCheckIdentifier = String.valueOf(chars);
+      setKerberosServiceCheckIdentifier(cluster, serviceCheckIdentifier);
+    }
+
+    return serviceCheckIdentifier;
+  }
+
+  /**
+   * Stores the Kerberos service check identifier value into the session data from the
+   * relevant Cluster object.
+   * <p/>
+   * This value is intended to be used by the KerberosHelper to manage uniquely created
+   * principals for use in service checks.
+   *
+   * @param cluster the relevant Cluster
+   * @param value   the Kerberos service check identifier to store or null to clear any previously set value
+   */
+  private void setKerberosServiceCheckIdentifier(Cluster cluster, String value) {
+    if (value == null) {
+      cluster.removeSessionAttribute(SERVICE_CHECK_IDENTIFIER);
+    } else {
+      cluster.setSessionAttribute(SERVICE_CHECK_IDENTIFIER, value);
+    }
+  }
+
+  /**
+   * Given a Collection of ServiceComponentHosts generates a unique list of hosts.
+   *
+   * @param serviceComponentHosts a Collection of ServiceComponentHosts from which to retrieve host names
+   * @param allowedStates         a Set of HostStates to use to filter the list of hosts, if null, no filter is applied
+   * @return a List of (unique) host names
+   * @throws org.apache.ambari.server.AmbariException
+   */
+  private List<String> createUniqueHostList(Collection<ServiceComponentHost> serviceComponentHosts, Set<HostState> allowedStates)
+      throws AmbariException {
+    Set<String> hostNames = new HashSet<String>();
+    Set<String> visitedHostNames = new HashSet<String>();
+
+    if (serviceComponentHosts != null) {
+      for (ServiceComponentHost sch : serviceComponentHosts) {
+        String hostname = sch.getHostName();
+        if (!visitedHostNames.contains(hostname)) {
+          // If allowedStates is null, assume the caller doesn't care about the state of the host
+          // so skip the call to get the relevant Host data and just add the host to the list
+          if (allowedStates == null) {
+            hostNames.add(hostname);
+          } else {
+            Host host = clusters.getHost(hostname);
+
+            if (allowedStates.contains(host.getState())) {
+              hostNames.add(hostname);
+            }
+          }
+
+          visitedHostNames.add(hostname);
+        }
+      }
+    }
+
+    return new ArrayList<String>(hostNames);
+  }
+
+  @Override
+  public boolean isClusterKerberosEnabled(Cluster cluster) {
+    return cluster.getSecurityType() == SecurityType.KERBEROS;
+  }
+
+  @Override
+  public boolean shouldExecuteCustomOperations(SecurityType requestSecurityType, Map<String, String> requestProperties) {
+
+    if (((requestSecurityType == SecurityType.KERBEROS) || (requestSecurityType == SecurityType.NONE)) &&
+        (requestProperties != null) && !requestProperties.isEmpty()) {
+      for (SupportedCustomOperation type : SupportedCustomOperation.values()) {
+        if (requestProperties.containsKey(type.name().toLowerCase())) {
+          return true;
+        }
+      }
+    }
+    return false;
+  }
+
+  @Override
+  public Boolean getManageIdentitiesDirective(Map<String, String> requestProperties) {
+    String value = (requestProperties == null) ? null : requestProperties.get(DIRECTIVE_MANAGE_KERBEROS_IDENTITIES);
+
+    return (value == null)
+        ? null
+        : !"false".equalsIgnoreCase(value);
+  }
+
+  /**
+   * Given a list of KerberosIdentityDescriptors, returns a Map of configuration types to property
+   * names and values.
+   * <p/>
+   * The property names and values are not expected to have any variable replacements done.
+   *
+   * @param identityDescriptors a List of KerberosIdentityDescriptor from which to retrieve configurations
+   * @return a Map of configuration types to property name/value pairs (as a Map)
+   */
+  private Map<String, Map<String, String>> getConfigurations(List<Kerbe

<TRUNCATED>

[4/5] ambari git commit: AMBARI-11396. Kerberos: UI shows Kerberize Cluster step as failed with a retry button, but the backend keeps moving forward to Kerberize the cluster (rlevas)

Posted by rl...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/e3acc7f0/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelper.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelper.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelper.java
index 141803b..cb9e6ca 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelper.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelper.java
@@ -18,177 +18,39 @@
 
 package org.apache.ambari.server.controller;
 
-import com.google.inject.Inject;
-import com.google.inject.persist.Transactional;
 import org.apache.ambari.server.AmbariException;
-import org.apache.ambari.server.Role;
-import org.apache.ambari.server.RoleCommand;
-import org.apache.ambari.server.actionmanager.ActionManager;
-import org.apache.ambari.server.actionmanager.RequestFactory;
-import org.apache.ambari.server.actionmanager.Stage;
-import org.apache.ambari.server.actionmanager.StageFactory;
-import org.apache.ambari.server.api.services.AmbariMetaInfo;
-import org.apache.ambari.server.configuration.Configuration;
-import org.apache.ambari.server.controller.internal.ArtifactResourceProvider;
-import org.apache.ambari.server.controller.internal.RequestImpl;
-import org.apache.ambari.server.controller.internal.RequestResourceFilter;
 import org.apache.ambari.server.controller.internal.RequestStageContainer;
-import org.apache.ambari.server.controller.spi.ClusterController;
-import org.apache.ambari.server.controller.spi.NoSuchParentResourceException;
-import org.apache.ambari.server.controller.spi.NoSuchResourceException;
-import org.apache.ambari.server.controller.spi.Predicate;
-import org.apache.ambari.server.controller.spi.Request;
-import org.apache.ambari.server.controller.spi.Resource;
-import org.apache.ambari.server.controller.spi.ResourceProvider;
-import org.apache.ambari.server.controller.spi.SystemException;
-import org.apache.ambari.server.controller.spi.UnsupportedPropertyException;
-import org.apache.ambari.server.controller.utilities.ClusterControllerHelper;
-import org.apache.ambari.server.controller.utilities.PredicateBuilder;
-import org.apache.ambari.server.metadata.RoleCommandOrder;
-import org.apache.ambari.server.serveraction.ServerAction;
-import org.apache.ambari.server.serveraction.kerberos.CreateKeytabFilesServerAction;
-import org.apache.ambari.server.serveraction.kerberos.CreatePrincipalsServerAction;
-import org.apache.ambari.server.serveraction.kerberos.DestroyPrincipalsServerAction;
-import org.apache.ambari.server.serveraction.kerberos.FinalizeKerberosServerAction;
-import org.apache.ambari.server.serveraction.kerberos.KDCType;
-import org.apache.ambari.server.serveraction.kerberos.KerberosConfigDataFileWriter;
-import org.apache.ambari.server.serveraction.kerberos.KerberosConfigDataFileWriterFactory;
-import org.apache.ambari.server.serveraction.kerberos.KerberosIdentityDataFileWriter;
 import org.apache.ambari.server.serveraction.kerberos.KerberosAdminAuthenticationException;
-import org.apache.ambari.server.serveraction.kerberos.KerberosCredential;
-import org.apache.ambari.server.serveraction.kerberos.KerberosIdentityDataFileWriterFactory;
+import org.apache.ambari.server.serveraction.kerberos.KerberosIdentityDataFileWriter;
 import org.apache.ambari.server.serveraction.kerberos.KerberosInvalidConfigurationException;
-import org.apache.ambari.server.serveraction.kerberos.KerberosKDCConnectionException;
-import org.apache.ambari.server.serveraction.kerberos.KerberosLDAPContainerException;
 import org.apache.ambari.server.serveraction.kerberos.KerberosMissingAdminCredentialsException;
 import org.apache.ambari.server.serveraction.kerberos.KerberosOperationException;
-import org.apache.ambari.server.serveraction.kerberos.KerberosOperationHandler;
-import org.apache.ambari.server.serveraction.kerberos.KerberosOperationHandlerFactory;
-import org.apache.ambari.server.serveraction.kerberos.KerberosRealmException;
-import org.apache.ambari.server.serveraction.kerberos.KerberosServerAction;
-import org.apache.ambari.server.serveraction.kerberos.UpdateKerberosConfigsServerAction;
-import org.apache.ambari.server.serveraction.kerberos.CleanupServerAction;
-import org.apache.ambari.server.stageplanner.RoleGraph;
-import org.apache.ambari.server.stageplanner.RoleGraphFactory;
 import org.apache.ambari.server.state.Cluster;
-import org.apache.ambari.server.state.Clusters;
-import org.apache.ambari.server.state.Config;
-import org.apache.ambari.server.state.ConfigHelper;
-import org.apache.ambari.server.state.Host;
-import org.apache.ambari.server.state.HostState;
-import org.apache.ambari.server.state.PropertyInfo;
-import org.apache.ambari.server.state.SecurityState;
 import org.apache.ambari.server.state.SecurityType;
-import org.apache.ambari.server.state.Service;
-import org.apache.ambari.server.state.ServiceComponent;
 import org.apache.ambari.server.state.ServiceComponentHost;
-import org.apache.ambari.server.state.StackId;
-import org.apache.ambari.server.state.State;
-import org.apache.ambari.server.state.kerberos.KerberosComponentDescriptor;
 import org.apache.ambari.server.state.kerberos.KerberosConfigurationDescriptor;
 import org.apache.ambari.server.state.kerberos.KerberosDescriptor;
-import org.apache.ambari.server.state.kerberos.KerberosDescriptorFactory;
 import org.apache.ambari.server.state.kerberos.KerberosIdentityDescriptor;
-import org.apache.ambari.server.state.kerberos.KerberosKeytabDescriptor;
-import org.apache.ambari.server.state.kerberos.KerberosPrincipalDescriptor;
-import org.apache.ambari.server.state.kerberos.KerberosServiceDescriptor;
-import org.apache.ambari.server.state.kerberos.VariableReplacementHelper;
-import org.apache.ambari.server.state.svccomphost.ServiceComponentHostServerActionEvent;
-import org.apache.ambari.server.utils.StageUtils;
-import org.apache.commons.io.FileUtils;
-import org.apache.commons.lang.StringUtils;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
-import java.io.File;
 import java.io.IOException;
-import java.util.ArrayList;
 import java.util.Collection;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
-import java.util.Random;
 import java.util.Set;
 
-public class KerberosHelper {
+public interface KerberosHelper {
   /**
    * directive used to override the behavior of the kerberos-env/manage_identities property
    */
-  public static final String DIRECTIVE_MANAGE_KERBEROS_IDENTITIES = "manage_kerberos_identities";
-
-  private static final String BASE_LOG_DIR = "/tmp/ambari";
-
-  private static final Logger LOG = LoggerFactory.getLogger(KerberosHelper.class);
-
+  String DIRECTIVE_MANAGE_KERBEROS_IDENTITIES = "manage_kerberos_identities";
   /**
    * config type which contains the property used to determine if Kerberos is enabled
    */
-  private static final String SECURITY_ENABLED_CONFIG_TYPE = "cluster-env";
-
+  String SECURITY_ENABLED_CONFIG_TYPE = "cluster-env";
   /**
    * name of property which states whether kerberos is enabled
    */
-  private static final String SECURITY_ENABLED_PROPERTY_NAME = "security_enabled";
-
-  /**
-   * name of the property used to hold the service check identifier value, used when creating and
-   * destroying the (unique) service check identity.
-   */
-  private static final String SERVICE_CHECK_IDENTIFIER = "_kerberos_internal_service_check_identifier";
-
-  @Inject
-  private AmbariCustomCommandExecutionHelper customCommandExecutionHelper;
-
-  @Inject
-  private AmbariManagementController ambariManagementController;
-
-  @Inject
-  private AmbariMetaInfo ambariMetaInfo;
-
-  @Inject
-  private ActionManager actionManager;
-
-  @Inject
-  private RequestFactory requestFactory;
-
-  @Inject
-  private StageFactory stageFactory;
-
-  @Inject
-  private RoleGraphFactory roleGraphFactory;
-
-  @Inject
-  private Clusters clusters;
-
-  @Inject
-  private ConfigHelper configHelper;
-
-  @Inject
-  private Configuration configuration;
-
-  @Inject
-  private KerberosOperationHandlerFactory kerberosOperationHandlerFactory;
-
-  @Inject
-  private KerberosDescriptorFactory kerberosDescriptorFactory;
-
-  @Inject
-  private KerberosIdentityDataFileWriterFactory kerberosIdentityDataFileWriterFactory;
-
-  @Inject
-  private KerberosConfigDataFileWriterFactory kerberosConfigDataFileWriterFactory;
-
-  @Inject
-  private VariableReplacementHelper variableReplacementHelper;
-
-  /**
-   * Used to get kerberos descriptors associated with the cluster or stack.
-   * Currently not available via injection.
-   */
-  private static ClusterController clusterController = null;
+  String SECURITY_ENABLED_PROPERTY_NAME = "security_enabled";
 
   /**
    * Toggles Kerberos security to enable it or remove it depending on the state of the cluster.
@@ -219,28 +81,10 @@ public class KerberosHelper {
    *                                               Kerberos-specific configuration details
    * @throws KerberosOperationException
    */
-  public RequestStageContainer toggleKerberos(Cluster cluster, SecurityType securityType,
-                                              RequestStageContainer requestStageContainer,
-                                              Boolean manageIdentities)
-      throws AmbariException, KerberosOperationException {
-
-    KerberosDetails kerberosDetails = getKerberosDetails(cluster, manageIdentities);
-
-    // Update KerberosDetails with the new security type - the current one in the cluster is the "old" value
-    kerberosDetails.setSecurityType(securityType);
-
-    if (securityType == SecurityType.KERBEROS) {
-      LOG.info("Configuring Kerberos for realm {} on cluster, {}", kerberosDetails.getDefaultRealm(), cluster.getClusterName());
-      requestStageContainer = handle(cluster, kerberosDetails, null, null, null, requestStageContainer, new EnableKerberosHandler());
-    } else if (securityType == SecurityType.NONE) {
-      LOG.info("Disabling Kerberos from cluster, {}", cluster.getClusterName());
-      requestStageContainer = handle(cluster, kerberosDetails, null, null, null, requestStageContainer, new DisableKerberosHandler());
-    } else {
-      throw new AmbariException(String.format("Unexpected security type value: %s", securityType.name()));
-    }
-
-    return requestStageContainer;
-  }
+  RequestStageContainer toggleKerberos(Cluster cluster, SecurityType securityType,
+                                       RequestStageContainer requestStageContainer,
+                                       Boolean manageIdentities)
+      throws AmbariException, KerberosOperationException;
 
   /**
    * Used to execute custom security operations which are sent as directives in URI
@@ -260,46 +104,10 @@ public class KerberosHelper {
    * @throws KerberosInvalidConfigurationException if an issue occurs trying to get the
    *                                               Kerberos-specific configuration details
    */
-  public RequestStageContainer executeCustomOperations(Cluster cluster, Map<String, String> requestProperties,
-                                                       RequestStageContainer requestStageContainer,
-                                                       Boolean manageIdentities)
-      throws AmbariException, KerberosOperationException {
-
-    if (requestProperties != null) {
-
-      for (SupportedCustomOperation operation : SupportedCustomOperation.values()) {
-        if (requestProperties.containsKey(operation.name().toLowerCase())) {
-          String value = requestProperties.get(operation.name().toLowerCase());
-
-          // The operation specific logic is kept in one place and described here
-          switch (operation) {
-            case REGENERATE_KEYTABS:
-              if (cluster.getSecurityType() != SecurityType.KERBEROS) {
-                throw new AmbariException(String.format("Custom operation %s can only be requested with the security type cluster property: %s", operation.name(), SecurityType.KERBEROS.name()));
-              }
-
-              if ("true".equalsIgnoreCase(value) || "all".equalsIgnoreCase(value)) {
-                requestStageContainer = handle(cluster, getKerberosDetails(cluster, manageIdentities), null, null, null,
-                    requestStageContainer, new CreatePrincipalsAndKeytabsHandler(true));
-              } else if ("missing".equalsIgnoreCase(value)) {
-                requestStageContainer = handle(cluster, getKerberosDetails(cluster, manageIdentities), null, null, null,
-                    requestStageContainer, new CreatePrincipalsAndKeytabsHandler(false));
-              } else {
-                throw new AmbariException(String.format("Unexpected directive value: %s", value));
-              }
-
-              break;
-
-            default: // No other operations are currently supported
-              throw new AmbariException(String.format("Custom operation not supported: %s", operation.name()));
-          }
-        }
-      }
-    }
-
-    return requestStageContainer;
-  }
-
+  RequestStageContainer executeCustomOperations(Cluster cluster, Map<String, String> requestProperties,
+                                                RequestStageContainer requestStageContainer,
+                                                Boolean manageIdentities)
+      throws AmbariException, KerberosOperationException;
 
   /**
    * Ensures the set of filtered principals and keytabs exist on the cluster.
@@ -341,13 +149,10 @@ public class KerberosHelper {
    * @throws KerberosInvalidConfigurationException if an issue occurs trying to get the
    *                                               Kerberos-specific configuration details
    */
-  public RequestStageContainer ensureIdentities(Cluster cluster, Map<String, ? extends Collection<String>> serviceComponentFilter,
-                                                Collection<String> identityFilter, Set<String> hostsToForceKerberosOperations,
-                                                RequestStageContainer requestStageContainer, Boolean manageIdentities)
-      throws AmbariException, KerberosOperationException {
-    return handle(cluster, getKerberosDetails(cluster, manageIdentities), serviceComponentFilter, identityFilter,
-        hostsToForceKerberosOperations, requestStageContainer, new CreatePrincipalsAndKeytabsHandler(false));
-  }
+  RequestStageContainer ensureIdentities(Cluster cluster, Map<String, ? extends Collection<String>> serviceComponentFilter,
+                                         Collection<String> identityFilter, Set<String> hostsToForceKerberosOperations,
+                                         RequestStageContainer requestStageContainer, Boolean manageIdentities)
+      throws AmbariException, KerberosOperationException;
 
   /**
    * Deletes the set of filtered principals and keytabs from the cluster.
@@ -382,13 +187,10 @@ public class KerberosHelper {
    * @throws KerberosInvalidConfigurationException if an issue occurs trying to get the
    *                                               Kerberos-specific configuration details
    */
-  public RequestStageContainer deleteIdentities(Cluster cluster, Map<String, ? extends Collection<String>> serviceComponentFilter,
-                                                Collection<String> identityFilter, RequestStageContainer requestStageContainer,
-                                                Boolean manageIdentities)
-      throws AmbariException, KerberosOperationException {
-    return handle(cluster, getKerberosDetails(cluster, manageIdentities), serviceComponentFilter, identityFilter, null,
-        requestStageContainer, new DeletePrincipalsAndKeytabsHandler());
-  }
+  RequestStageContainer deleteIdentities(Cluster cluster, Map<String, ? extends Collection<String>> serviceComponentFilter,
+                                         Collection<String> identityFilter, RequestStageContainer requestStageContainer,
+                                         Boolean manageIdentities)
+      throws AmbariException, KerberosOperationException;
 
   /**
    * Updates the relevant configurations for the given Service.
@@ -400,57 +202,8 @@ public class KerberosHelper {
    * @param serviceComponentHost the ServiceComponentHost
    * @throws AmbariException
    */
-  public void configureService(Cluster cluster, ServiceComponentHost serviceComponentHost)
-      throws AmbariException, KerberosInvalidConfigurationException {
-
-    KerberosDetails kerberosDetails = getKerberosDetails(cluster, null);
-
-    // Set properties...
-    String serviceName = serviceComponentHost.getServiceName();
-    KerberosDescriptor kerberosDescriptor = getKerberosDescriptor(cluster);
-    KerberosServiceDescriptor serviceDescriptor = kerberosDescriptor.getService(serviceName);
-
-    if (serviceDescriptor != null) {
-      Map<String, String> kerberosDescriptorProperties = kerberosDescriptor.getProperties();
-      Map<String, Map<String, String>> kerberosConfigurations = new HashMap<String, Map<String, String>>();
-      Map<String, Map<String, String>> configurations = calculateConfigurations(cluster,
-          serviceComponentHost.getHostName(), kerberosDescriptorProperties);
-
-      Map<String, KerberosComponentDescriptor> componentDescriptors = serviceDescriptor.getComponents();
-      for (KerberosComponentDescriptor componentDescriptor : componentDescriptors.values()) {
-        if (componentDescriptor != null) {
-          Map<String, Map<String, String>> identityConfigurations;
-          List<KerberosIdentityDescriptor> identities;
-
-          identities = serviceDescriptor.getIdentities(true);
-          identityConfigurations = getConfigurations(identities);
-          if (identityConfigurations != null) {
-            for (Map.Entry<String, Map<String, String>> entry : identityConfigurations.entrySet()) {
-              mergeConfigurations(kerberosConfigurations, entry.getKey(), entry.getValue(), configurations);
-            }
-          }
-
-          identities = componentDescriptor.getIdentities(true);
-          identityConfigurations = getConfigurations(identities);
-          if (identityConfigurations != null) {
-            for (Map.Entry<String, Map<String, String>> entry : identityConfigurations.entrySet()) {
-              mergeConfigurations(kerberosConfigurations, entry.getKey(), entry.getValue(), configurations);
-            }
-          }
-
-          mergeConfigurations(kerberosConfigurations,
-              componentDescriptor.getConfigurations(true), configurations);
-        }
-      }
-
-      setAuthToLocalRules(kerberosDescriptor, cluster, kerberosDetails.getDefaultRealm(), configurations, kerberosConfigurations);
-
-      for (Map.Entry<String, Map<String, String>> entry : kerberosConfigurations.entrySet()) {
-        configHelper.updateConfigType(cluster, ambariManagementController, entry.getKey(), entry.getValue(), null,
-            ambariManagementController.getAuthName(), String.format("Enabling Kerberos for %s", serviceName));
-      }
-    }
-  }
+  void configureService(Cluster cluster, ServiceComponentHost serviceComponentHost)
+      throws AmbariException, KerberosInvalidConfigurationException;
 
   /**
    * Create a unique identity to use for testing the general Kerberos configuration.
@@ -462,11 +215,9 @@ public class KerberosHelper {
    * @return the updated or a new RequestStageContainer containing the stages that need to be
    * executed to complete this task; or null if no stages need to be executed.
    */
-  public RequestStageContainer createTestIdentity(Cluster cluster, Map<String, String> commandParamsStage,
-                                                  RequestStageContainer requestStageContainer)
-      throws KerberosOperationException, AmbariException {
-    return handleTestIdentity(cluster, getKerberosDetails(cluster, null), commandParamsStage, requestStageContainer, new CreatePrincipalsAndKeytabsHandler(false));
-  }
+  RequestStageContainer createTestIdentity(Cluster cluster, Map<String, String> commandParamsStage,
+                                           RequestStageContainer requestStageContainer)
+      throws KerberosOperationException, AmbariException;
 
   /**
    * Deletes the unique identity to use for testing the general Kerberos configuration.
@@ -478,16 +229,9 @@ public class KerberosHelper {
    * @return the updated or a new RequestStageContainer containing the stages that need to be
    * executed to complete this task; or null if no stages need to be executed.
    */
-  public RequestStageContainer deleteTestIdentity(Cluster cluster, Map<String, String> commandParamsStage,
-                                                  RequestStageContainer requestStageContainer)
-      throws KerberosOperationException, AmbariException {
-    requestStageContainer = handleTestIdentity(cluster, getKerberosDetails(cluster, null), commandParamsStage, requestStageContainer, new DeletePrincipalsAndKeytabsHandler());
-
-    // Clear the Kerberos service check identifier
-    setKerberosServiceCheckIdentifier(cluster, null);
-
-    return requestStageContainer;
-  }
+  RequestStageContainer deleteTestIdentity(Cluster cluster, Map<String, String> commandParamsStage,
+                                           RequestStageContainer requestStageContainer)
+      throws KerberosOperationException, AmbariException;
 
   /**
    * Validate the KDC admin credentials.
@@ -495,110 +239,10 @@ public class KerberosHelper {
    * @param cluster associated cluster
    * @throws AmbariException if any other error occurs while trying to validate the credentials
    */
-  public void validateKDCCredentials(Cluster cluster) throws KerberosMissingAdminCredentialsException,
-      KerberosAdminAuthenticationException,
-      KerberosInvalidConfigurationException,
-      AmbariException {
-    validateKDCCredentials(null, cluster);
-  }
-
-  /**
-   * Validate the KDC admin credentials.
-   *
-   * @param kerberosDetails the KerberosDetails containing information about the Kerberos configuration
-   *                        for the cluster, if null, a new KerberosDetails will be created based on
-   *                        information found in the associated cluster
-   * @param cluster         associated cluster
-   * @throws AmbariException if any other error occurs while trying to validate the credentials
-   */
-  private void validateKDCCredentials(KerberosDetails kerberosDetails, Cluster cluster) throws KerberosMissingAdminCredentialsException,
+  void validateKDCCredentials(Cluster cluster) throws KerberosMissingAdminCredentialsException,
       KerberosAdminAuthenticationException,
       KerberosInvalidConfigurationException,
-      AmbariException {
-
-    if(kerberosDetails == null) {
-      kerberosDetails = getKerberosDetails(cluster, null);
-    }
-
-    if(kerberosDetails.manageIdentities()) {
-      String credentials = getEncryptedAdministratorCredentials(cluster);
-      if (credentials == null) {
-        throw new KerberosMissingAdminCredentialsException(
-            "Missing KDC administrator credentials.\n" +
-                "The KDC administrator credentials must be set in session by updating the relevant Cluster resource." +
-                "This may be done by issuing a PUT to the api/v1/clusters/(cluster name) API entry point with the following payload:\n" +
-                "{\n" +
-                "  \"session_attributes\" : {\n" +
-                "    \"kerberos_admin\" : {\"principal\" : \"(PRINCIPAL)\", \"password\" : \"(PASSWORD)\"}\n" +
-                "  }\n" +
-                "}"
-        );
-      } else {
-        KerberosOperationHandler operationHandler = kerberosOperationHandlerFactory.getKerberosOperationHandler(kerberosDetails.getKdcType());
-
-        if (operationHandler == null) {
-          throw new AmbariException("Failed to get an appropriate Kerberos operation handler.");
-        } else {
-          byte[] key = Integer.toHexString(cluster.hashCode()).getBytes();
-          KerberosCredential kerberosCredentials = KerberosCredential.decrypt(credentials, key);
-
-          boolean missingCredentials = false;
-          try {
-            operationHandler.open(kerberosCredentials, kerberosDetails.getDefaultRealm(), kerberosDetails.getKerberosEnvProperties());
-            // todo: this is really odd that open doesn't throw an exception if the credentials are missing
-            missingCredentials = !operationHandler.testAdministratorCredentials();
-          } catch (KerberosAdminAuthenticationException e) {
-            throw new KerberosAdminAuthenticationException(
-                "Invalid KDC administrator credentials.\n" +
-                    "The KDC administrator credentials must be set in session by updating the relevant Cluster resource." +
-                    "This may be done by issuing a PUT to the api/v1/clusters/(cluster name) API entry point with the following payload:\n" +
-                    "{\n" +
-                    "  \"session_attributes\" : {\n" +
-                    "    \"kerberos_admin\" : {\"principal\" : \"(PRINCIPAL)\", \"password\" : \"(PASSWORD)\"}\n" +
-                    "  }\n" +
-                    "}", e);
-          } catch (KerberosKDCConnectionException e) {
-            throw new KerberosInvalidConfigurationException(
-                "Failed to connect to KDC - " + e.getMessage() + "\n" +
-                    "Update the KDC settings in krb5-conf and kerberos-env configurations to correct this issue.",
-                e);
-          } catch (KerberosRealmException e) {
-            throw new KerberosInvalidConfigurationException(
-                "Failed to find a KDC for the specified realm - " + e.getMessage() + "\n" +
-                    "Update the KDC settings in krb5-conf and kerberos-env configurations to correct this issue.",
-                e);
-          } catch (KerberosLDAPContainerException e) {
-            throw new KerberosInvalidConfigurationException(
-                "The principal container was not specified\n" +
-                    "Set the 'container_dn' value in the kerberos-env configuration to correct this issue.",
-                e);
-          } catch (KerberosOperationException e) {
-            throw new AmbariException(e.getMessage(), e);
-          } finally {
-            try {
-              operationHandler.close();
-            } catch (KerberosOperationException e) {
-              // Ignore this...
-            }
-          }
-
-          // need to throw this outside of the try/catch so it isn't caught
-          if (missingCredentials) {
-            throw new KerberosMissingAdminCredentialsException(
-                "Invalid KDC administrator credentials.\n" +
-                    "The KDC administrator credentials must be set in session by updating the relevant Cluster resource." +
-                    "This may be done by issuing a PUT to the api/v1/clusters/(cluster name) API entry point with the following payload:\n" +
-                    "{\n" +
-                    "  \"session_attributes\" : {\n" +
-                    "    \"kerberos_admin\" : {\"principal\" : \"(PRINCIPAL)\", \"password\" : \"(PASSWORD)\"}\n" +
-                    "  }\n" +
-                    "}"
-            );
-          }
-        }
-      }
-    }
-  }
+      AmbariException;
 
   /**
    * Sets the relevant auth-to-local rule configuration properties using the services installed on
@@ -612,644 +256,19 @@ public class KerberosHelper {
    *                               auth-to-local values will be stored
    * @throws AmbariException
    */
-  private void setAuthToLocalRules(KerberosDescriptor kerberosDescriptor, Cluster cluster, String realm,
-                                   Map<String, Map<String, String>> existingConfigurations,
-                                   Map<String, Map<String, String>> kerberosConfigurations)
-      throws AmbariException {
-
-    if (kerberosDescriptor != null) {
-
-      Set<String> authToLocalProperties;
-      Set<String> authToLocalPropertiesToSet = new HashSet<String>();
-
-      // Determine which properties need to be set
-      AuthToLocalBuilder authToLocalBuilder = new AuthToLocalBuilder();
-
-      addIdentities(authToLocalBuilder, kerberosDescriptor.getIdentities(), null, existingConfigurations);
-
-      authToLocalProperties = kerberosDescriptor.getAuthToLocalProperties();
-      if (authToLocalProperties != null) {
-        authToLocalPropertiesToSet.addAll(authToLocalProperties);
-      }
-
-      Map<String, KerberosServiceDescriptor> services = kerberosDescriptor.getServices();
-      if (services != null) {
-        Map<String, Service> installedServices = cluster.getServices();
-
-        for (KerberosServiceDescriptor service : services.values()) {
-          if (installedServices.containsKey(service.getName())) {
-
-            addIdentities(authToLocalBuilder, service.getIdentities(true), null, existingConfigurations);
-
-            authToLocalProperties = service.getAuthToLocalProperties();
-            if (authToLocalProperties != null) {
-              authToLocalPropertiesToSet.addAll(authToLocalProperties);
-            }
-
-            Map<String, KerberosComponentDescriptor> components = service.getComponents();
-            if (components != null) {
-              for (KerberosComponentDescriptor component : components.values()) {
-                addIdentities(authToLocalBuilder, component.getIdentities(true), null, existingConfigurations);
-
-                authToLocalProperties = component.getAuthToLocalProperties();
-                if (authToLocalProperties != null) {
-                  authToLocalPropertiesToSet.addAll(authToLocalProperties);
-                }
-              }
-            }
-          }
-        }
-      }
-
-      if (!authToLocalPropertiesToSet.isEmpty()) {
-        for (String authToLocalProperty : authToLocalPropertiesToSet) {
-          String[] parts = authToLocalProperty.split("/");
-
-          if (parts.length == 2) {
-            AuthToLocalBuilder builder = authToLocalBuilder.copy();
-            String configType = parts[0];
-            String propertyName = parts[1];
-
-            // Add existing auth_to_local configuration, if set
-            Map<String, String> existingConfiguration = existingConfigurations.get(configType);
-            if (existingConfiguration != null) {
-              builder.addRules(existingConfiguration.get(propertyName));
-            }
-
-            // Add/update descriptor auth_to_local configuration, if set
-            Map<String, String> kerberosConfiguration = kerberosConfigurations.get(configType);
-            if (kerberosConfiguration != null) {
-              builder.addRules(kerberosConfiguration.get(propertyName));
-            } else {
-              kerberosConfiguration = new HashMap<String, String>();
-              kerberosConfigurations.put(configType, kerberosConfiguration);
-            }
-
-            kerberosConfiguration.put(propertyName, builder.generate(realm));
-          }
-        }
-      }
-    }
-  }
-
-
-  /**
-   * Performs operations needed to process Kerberos related tasks on the relevant cluster.
-   * <p/>
-   * Iterates through the components installed on the relevant cluster to determine if work
-   * need to be done.  Calls into the Handler implementation to provide guidance and set up stages
-   * to perform the work needed to complete the relative action.
-   *
-   * @param cluster                        the relevant Cluster
-   * @param kerberosDetails                a KerberosDetails containing information about relevant Kerberos configuration
-   * @param serviceComponentFilter         a Map of service names to component names indicating the relevant
-   *                                       set of services and components - if null, no filter is relevant;
-   *                                       if empty, the filter indicates no relevant services or components
-   * @param identityFilter                 a Collection of identity names indicating the relevant identities -
-   *                                       if null, no filter is relevant; if empty, the filter indicates no
-   *                                       relevant identities
-   * @param hostsToForceKerberosOperations a set of host names on which it is expected that the
-   *                                       Kerberos client is or will be in the INSTALLED state by
-   *                                       the time the operations targeted for them are to be
-   *                                       executed - if empty or null, this no hosts will be
-   *                                       "forced"
-   * @param requestStageContainer          a RequestStageContainer to place generated stages, if needed -
-   *                                       if null a new RequestStageContainer will be created.
-   * @param handler                        a Handler to use to provide guidance and set up stages
-   *                                       to perform the work needed to complete the relative action
-   * @return the updated or a new RequestStageContainer containing the stages that need to be
-   * executed to complete this task; or null if no stages need to be executed.
-   * @throws AmbariException
-   * @throws KerberosInvalidConfigurationException if an issue occurs trying to get the
-   *                                               Kerberos-specific configuration details
-   */
-  @Transactional
-  private RequestStageContainer handle(Cluster cluster,
-                                       KerberosDetails kerberosDetails,
-                                       Map<String, ? extends Collection<String>> serviceComponentFilter,
-                                       Collection<String> identityFilter,
-                                       Set<String> hostsToForceKerberosOperations,
-                                       RequestStageContainer requestStageContainer,
-                                       Handler handler)
-      throws AmbariException, KerberosOperationException {
-
-    Map<String, Service> services = cluster.getServices();
-
-    if ((services != null) && !services.isEmpty()) {
-      SecurityState desiredSecurityState = handler.getNewServiceSecurityState();
-      String clusterName = cluster.getClusterName();
-      Map<String, Host> hosts = clusters.getHostsForCluster(clusterName);
-
-      if ((hosts != null) && !hosts.isEmpty()) {
-        List<ServiceComponentHost> serviceComponentHostsToProcess = new ArrayList<ServiceComponentHost>();
-        KerberosDescriptor kerberosDescriptor = getKerberosDescriptor(cluster);
-        KerberosIdentityDataFileWriter kerberosIdentityDataFileWriter = null;
-        Map<String, String> kerberosDescriptorProperties = kerberosDescriptor.getProperties();
-        Map<String, Map<String, String>> kerberosConfigurations = new HashMap<String, Map<String, String>>();
-
-        // While iterating over all the ServiceComponentHosts find hosts that have KERBEROS_CLIENT
-        // components in the INSTALLED state and add them to the hostsWithValidKerberosClient Set.
-        // This is needed to help determine which hosts to perform actions for and create tasks for.
-        Set<String> hostsWithValidKerberosClient = new HashSet<String>();
-
-        // Ensure that that hosts that should be assumed to be in the correct state when needed are
-        // in the hostsWithValidKerberosClient collection.
-        if (hostsToForceKerberosOperations != null) {
-          hostsWithValidKerberosClient.addAll(hostsToForceKerberosOperations);
-        }
-
-        // Create a temporary directory to store metadata needed to complete this task.  Information
-        // such as which principals and keytabs files to create as well as what configurations need
-        // to be update are stored in data files in this directory. Any keytab files are stored in
-        // this directory until they are distributed to their appropriate hosts.
-        File dataDirectory = createTemporaryDirectory();
-
-        // Create the file used to store details about principals and keytabs to create
-        File identityDataFile = new File(dataDirectory, KerberosIdentityDataFileWriter.DATA_FILE_NAME);
-
-        try {
-          // Iterate over the hosts in the cluster to find the components installed in each.  For each
-          // component (aka service component host - sch) determine the configuration updates and
-          // and the principals an keytabs to create.
-          for (Host host : hosts.values()) {
-            String hostname = host.getHostName();
-
-            // Get a list of components on the current host
-            List<ServiceComponentHost> serviceComponentHosts = cluster.getServiceComponentHosts(hostname);
-
-            if ((serviceComponentHosts != null) && !serviceComponentHosts.isEmpty()) {
-              // Calculate the current host-specific configurations. These will be used to replace
-              // variables within the Kerberos descriptor data
-              Map<String, Map<String, String>> configurations = calculateConfigurations(cluster, hostname, kerberosDescriptorProperties);
-
-              // Iterate over the components installed on the current host to get the service and
-              // component-level Kerberos descriptors in order to determine which principals,
-              // keytab files, and configurations need to be created or updated.
-              for (ServiceComponentHost sch : serviceComponentHosts) {
-                String serviceName = sch.getServiceName();
-                String componentName = sch.getServiceComponentName();
-
-                // If the current ServiceComponentHost represents the KERBEROS/KERBEROS_CLIENT and
-                // indicates that the KERBEROS_CLIENT component is in the INSTALLED state, add the
-                // current host to the set of hosts that should be handled...
-                if (Service.Type.KERBEROS.name().equals(serviceName) &&
-                    Role.KERBEROS_CLIENT.name().equals(componentName) &&
-                    (sch.getState() == State.INSTALLED)) {
-                  hostsWithValidKerberosClient.add(hostname);
-                }
-
-                // If there is no filter or the filter contains the current service name...
-                if ((serviceComponentFilter == null) || serviceComponentFilter.containsKey(serviceName)) {
-                  Collection<String> componentFilter = (serviceComponentFilter == null) ? null : serviceComponentFilter.get(serviceName);
-                  KerberosServiceDescriptor serviceDescriptor = kerberosDescriptor.getService(serviceName);
-
-                  if (serviceDescriptor != null) {
-                    int identitiesAdded = 0;
-                    List<KerberosIdentityDescriptor> serviceIdentities = serviceDescriptor.getIdentities(true);
-
-                    // Lazily create the KerberosIdentityDataFileWriter instance...
-                    if (kerberosIdentityDataFileWriter == null) {
-                      kerberosIdentityDataFileWriter = kerberosIdentityDataFileWriterFactory.createKerberosIdentityDataFileWriter(identityDataFile);
-                    }
-
-                    // Add service-level principals (and keytabs)
-                    identitiesAdded += addIdentities(kerberosIdentityDataFileWriter, serviceIdentities,
-                        identityFilter, hostname, serviceName, componentName, kerberosConfigurations, configurations);
-
-                    // If there is no filter or the filter contains the current component name,
-                    // test to see if this component should be process by querying the handler...
-                    if (((componentFilter == null) || componentFilter.contains(componentName)) && handler.shouldProcess(desiredSecurityState, sch)) {
-                      KerberosComponentDescriptor componentDescriptor = serviceDescriptor.getComponent(componentName);
-
-                      if (componentDescriptor != null) {
-                        List<KerberosIdentityDescriptor> componentIdentities = componentDescriptor.getIdentities(true);
-
-                        // Calculate the set of configurations to update and replace any variables
-                        // using the previously calculated Map of configurations for the host.
-                        mergeConfigurations(kerberosConfigurations,
-                            componentDescriptor.getConfigurations(true), configurations);
-
-                        // Add component-level principals (and keytabs)
-                        identitiesAdded += addIdentities(kerberosIdentityDataFileWriter, componentIdentities,
-                            identityFilter, hostname, serviceName, componentName, kerberosConfigurations, configurations);
-                      }
-                    }
-
-                    if (identitiesAdded > 0) {
-                      serviceComponentHostsToProcess.add(sch);
-                    }
-                  }
-                }
-              }
-            }
-          }
-        } catch (IOException e) {
-          String message = String.format("Failed to write index file - %s", identityDataFile.getAbsolutePath());
-          LOG.error(message);
-          throw new AmbariException(message, e);
-        } finally {
-          if (kerberosIdentityDataFileWriter != null) {
-            // Make sure the data file is closed
-            try {
-              kerberosIdentityDataFileWriter.close();
-            } catch (IOException e) {
-              LOG.warn("Failed to close the index file writer", e);
-            }
-          }
-        }
-
-        // If there are ServiceComponentHosts to process, make sure the administrator credentials
-        // are available
-        if (!serviceComponentHostsToProcess.isEmpty()) {
-          try {
-            validateKDCCredentials(kerberosDetails, cluster);
-          } catch (KerberosOperationException e) {
-            try {
-              FileUtils.deleteDirectory(dataDirectory);
-            } catch (Throwable t) {
-              LOG.warn(String.format("The data directory (%s) was not deleted due to an error condition - {%s}",
-                  dataDirectory.getAbsolutePath(), t.getMessage()), t);
-            }
-
-            throw e;
-          }
-
-          setAuthToLocalRules(kerberosDescriptor, cluster, kerberosDetails.getDefaultRealm(),
-              calculateConfigurations(cluster, null, kerberosDescriptorProperties),
-              kerberosConfigurations);
-        }
-
-        // Ensure the cluster-env/security_enabled flag is set properly
-        Map<String, String> clusterEnvProperties = kerberosConfigurations.get(SECURITY_ENABLED_CONFIG_TYPE);
-        if (clusterEnvProperties == null) {
-          clusterEnvProperties = new HashMap<String, String>();
-          kerberosConfigurations.put(SECURITY_ENABLED_CONFIG_TYPE, clusterEnvProperties);
-        }
-        clusterEnvProperties.put(SECURITY_ENABLED_PROPERTY_NAME,
-            (kerberosDetails.getSecurityType() == SecurityType.KERBEROS) ? "true" : "false");
-
-        // Always set up the necessary stages to perform the tasks needed to complete the operation.
-        // Some stages may be no-ops, this is expected.
-        // Gather data needed to create stages and tasks...
-        Map<String, Set<String>> clusterHostInfo = StageUtils.getClusterHostInfo(cluster);
-        String clusterHostInfoJson = StageUtils.getGson().toJson(clusterHostInfo);
-        Map<String, String> hostParams = customCommandExecutionHelper.createDefaultHostParams(cluster);
-        String hostParamsJson = StageUtils.getGson().toJson(hostParams);
-        String ambariServerHostname = StageUtils.getHostName();
-        ServiceComponentHostServerActionEvent event = new ServiceComponentHostServerActionEvent(
-            "AMBARI_SERVER",
-            ambariServerHostname, // TODO: Choose a random hostname from the cluster. All tasks for the AMBARI_SERVER service will be executed on this Ambari server
-            System.currentTimeMillis());
-        RoleCommandOrder roleCommandOrder = ambariManagementController.getRoleCommandOrder(cluster);
-
-        // If a RequestStageContainer does not already exist, create a new one...
-        if (requestStageContainer == null) {
-          requestStageContainer = new RequestStageContainer(
-              actionManager.getNextRequestId(),
-              null,
-              requestFactory,
-              actionManager);
-        }
-
-        // Use the handler implementation to setup the relevant stages.
-        handler.createStages(cluster, hosts, kerberosConfigurations, clusterHostInfoJson,
-            hostParamsJson, event, roleCommandOrder, kerberosDetails, dataDirectory,
-            requestStageContainer, serviceComponentHostsToProcess, hostsWithValidKerberosClient);
-
-        // Add the finalize stage...
-        handler.addFinalizeOperationStage(cluster, clusterHostInfoJson, hostParamsJson, event,
-            dataDirectory, roleCommandOrder, requestStageContainer);
-
-        // If all goes well, set the appropriate states on the relevant ServiceComponentHosts
-        for (ServiceComponentHost sch : serviceComponentHostsToProcess) {
-          // Update the desired and current states for the ServiceComponentHost
-          // using new state information from the the handler implementation
-          SecurityState newSecurityState;
-
-          newSecurityState = handler.getNewDesiredSCHSecurityState();
-          if (newSecurityState != null) {
-            sch.setDesiredSecurityState(newSecurityState);
-          }
-
-          newSecurityState = handler.getNewSCHSecurityState();
-          if (newSecurityState != null) {
-            sch.setSecurityState(newSecurityState);
-          }
-        }
-      }
-
-      // If all goes well, set all services to _desire_ to be secured or unsecured, depending on handler
-      if (desiredSecurityState != null) {
-        for (Service service : services.values()) {
-          if ((serviceComponentFilter == null) || serviceComponentFilter.containsKey(service.getName())) {
-            service.setSecurityState(desiredSecurityState);
-          }
-        }
-      }
-    }
-
-    return requestStageContainer;
-  }
-
-  /**
-   * Performs operations needed to process Kerberos related tasks to manage a (unique) test identity
-   * on the relevant cluster.
-   *
-   * If Ambari is not managing Kerberos identities, than this method does nothing.
-   *
-   * @param cluster               the relevant Cluster
-   * @param kerberosDetails       a KerberosDetails containing information about relevant Kerberos
-   *                              configuration
-   * @param commandParameters     the command parameters map used to read and/or write attributes
-   *                              related to this operation
-   * @param requestStageContainer a RequestStageContainer to place generated stages, if needed -
-   *                              if null a new RequestStageContainer will be created.
-   * @param handler               a Handler to use to provide guidance and set up stages
-   *                              to perform the work needed to complete the relative action
-   * @return the updated or a new RequestStageContainer containing the stages that need to be
-   * executed to complete this task; or null if no stages need to be executed.
-   * @throws AmbariException
-   * @throws KerberosOperationException
-   */
-  private RequestStageContainer handleTestIdentity(Cluster cluster,
-                                                   KerberosDetails kerberosDetails,
-                                                   Map<String, String> commandParameters, RequestStageContainer requestStageContainer,
-                                                   Handler handler) throws AmbariException, KerberosOperationException {
-
-    if(kerberosDetails.manageIdentities()) {
-      if (commandParameters == null) {
-        throw new AmbariException("The properties map must not be null.  It is needed to store data related to the service check identity");
-      }
-
-      Map<String, Service> services = cluster.getServices();
-
-      if ((services != null) && !services.isEmpty()) {
-        String clusterName = cluster.getClusterName();
-        Map<String, Host> hosts = clusters.getHostsForCluster(clusterName);
-
-        if ((hosts != null) && !hosts.isEmpty()) {
-          List<ServiceComponentHost> serviceComponentHostsToProcess = new ArrayList<ServiceComponentHost>();
-          KerberosDescriptor kerberosDescriptor = getKerberosDescriptor(cluster);
-          KerberosIdentityDataFileWriter kerberosIdentityDataFileWriter = null;
-          Map<String, String> kerberosDescriptorProperties = kerberosDescriptor.getProperties();
-
-          // While iterating over all the ServiceComponentHosts find hosts that have KERBEROS_CLIENT
-          // components in the INSTALLED state and add them to the hostsWithValidKerberosClient Set.
-          // This is needed to help determine which hosts to perform actions for and create tasks for.
-          Set<String> hostsWithValidKerberosClient = new HashSet<String>();
-
-          // Create a temporary directory to store metadata needed to complete this task.  Information
-          // such as which principals and keytabs files to create as well as what configurations need
-          // to be update are stored in data files in this directory. Any keytab files are stored in
-          // this directory until they are distributed to their appropriate hosts.
-          File dataDirectory = createTemporaryDirectory();
-
-          // Create the file used to store details about principals and keytabs to create
-          File identityDataFile = new File(dataDirectory, KerberosIdentityDataFileWriter.DATA_FILE_NAME);
-
-          // Create a special identity for the test user
-          KerberosIdentityDescriptor identity = new KerberosIdentityDescriptor(new HashMap<String, Object>() {
-            {
-              put("principal",
-                  new HashMap<String, Object>() {
-                    {
-                      put("value", "${cluster-env/smokeuser}_${service_check_id}@${realm}");
-                      put("type", "user");
-                    }
-                  });
-              put("keytab",
-                  new HashMap<String, Object>() {
-                    {
-                      put("file", "${keytab_dir}/kerberos.service_check.${service_check_id}.keytab");
-
-                      put("owner", new HashMap<String, Object>() {{
-                        put("name", "${cluster-env/smokeuser}");
-                        put("access", "rw");
-                      }});
-
-                      put("group", new HashMap<String, Object>() {{
-                        put("name", "${cluster-env/user_group}");
-                        put("access", "r");
-                      }});
-
-                      put("cachable", "false");
-                    }
-                  });
-            }
-          });
-
-          // Get or create the unique service check identifier
-          String serviceCheckId = getKerberosServiceCheckIdentifier(cluster, true);
-
-          try {
-            // Iterate over the hosts in the cluster to find the components installed in each.  For each
-            // component (aka service component host - sch) determine the configuration updates and
-            // and the principals an keytabs to create.
-            for (Host host : hosts.values()) {
-              String hostname = host.getHostName();
-
-              // Get a list of components on the current host
-              List<ServiceComponentHost> serviceComponentHosts = cluster.getServiceComponentHosts(hostname);
-
-              if ((serviceComponentHosts != null) && !serviceComponentHosts.isEmpty()) {
-                // Calculate the current host-specific configurations. These will be used to replace
-                // variables within the Kerberos descriptor data
-                Map<String, Map<String, String>> configurations = calculateConfigurations(cluster, hostname, kerberosDescriptorProperties);
-
-                // Set the unique service check identifier
-                configurations.get("").put("service_check_id", serviceCheckId);
-
-                // Iterate over the components installed on the current host to get the service and
-                // component-level Kerberos descriptors in order to determine which principals,
-                // keytab files, and configurations need to be created or updated.
-                for (ServiceComponentHost sch : serviceComponentHosts) {
-                  String serviceName = sch.getServiceName();
-                  String componentName = sch.getServiceComponentName();
-
-                  // If the current ServiceComponentHost represents the KERBEROS/KERBEROS_CLIENT and
-                  // indicates that the KERBEROS_CLIENT component is in the INSTALLED state, add the
-                  // current host to the set of hosts that should be handled...
-                  if (Service.Type.KERBEROS.name().equals(serviceName) &&
-                      Role.KERBEROS_CLIENT.name().equals(componentName) &&
-                      (sch.getState() == State.INSTALLED)) {
-                    hostsWithValidKerberosClient.add(hostname);
-
-                    int identitiesAdded = 0;
-
-                    // Lazily create the KerberosIdentityDataFileWriter instance...
-                    if (kerberosIdentityDataFileWriter == null) {
-                      kerberosIdentityDataFileWriter = kerberosIdentityDataFileWriterFactory.createKerberosIdentityDataFileWriter(identityDataFile);
-                    }
-
-                    // Add service-level principals (and keytabs)
-                    identitiesAdded += addIdentities(kerberosIdentityDataFileWriter, Collections.singleton(identity),
-                        null, hostname, serviceName, componentName, null, configurations);
-
-                    if (identitiesAdded > 0) {
-                      // Add the relevant principal name and keytab file data to the command params state
-                      if (!commandParameters.containsKey("principal_name") || !commandParameters.containsKey("keytab_file")) {
-                        commandParameters.put("principal_name",
-                            variableReplacementHelper.replaceVariables(identity.getPrincipalDescriptor().getValue(), configurations));
-                        commandParameters.put("keytab_file",
-                            variableReplacementHelper.replaceVariables(identity.getKeytabDescriptor().getFile(), configurations));
-                      }
-
-                      serviceComponentHostsToProcess.add(sch);
-                    }
-                  }
-                }
-              }
-            }
-          } catch (IOException e) {
-            String message = String.format("Failed to write index file - %s", identityDataFile.getAbsolutePath());
-            LOG.error(message);
-            throw new AmbariException(message, e);
-          } finally {
-            if (kerberosIdentityDataFileWriter != null) {
-              // Make sure the data file is closed
-              try {
-                kerberosIdentityDataFileWriter.close();
-              } catch (IOException e) {
-                LOG.warn("Failed to close the index file writer", e);
-              }
-            }
-          }
-
-          // If there are ServiceComponentHosts to process, make sure the administrator credentials
-          // are available
-          if (!serviceComponentHostsToProcess.isEmpty()) {
-            try {
-              validateKDCCredentials(kerberosDetails, cluster);
-            } catch (KerberosOperationException e) {
-              try {
-                FileUtils.deleteDirectory(dataDirectory);
-              } catch (Throwable t) {
-                LOG.warn(String.format("The data directory (%s) was not deleted due to an error condition - {%s}",
-                    dataDirectory.getAbsolutePath(), t.getMessage()), t);
-              }
-
-              throw e;
-            }
-          }
-
-          // Always set up the necessary stages to perform the tasks needed to complete the operation.
-          // Some stages may be no-ops, this is expected.
-          // Gather data needed to create stages and tasks...
-          Map<String, Set<String>> clusterHostInfo = StageUtils.getClusterHostInfo(cluster);
-          String clusterHostInfoJson = StageUtils.getGson().toJson(clusterHostInfo);
-          Map<String, String> hostParams = customCommandExecutionHelper.createDefaultHostParams(cluster);
-          String hostParamsJson = StageUtils.getGson().toJson(hostParams);
-          String ambariServerHostname = StageUtils.getHostName();
-          ServiceComponentHostServerActionEvent event = new ServiceComponentHostServerActionEvent(
-              "AMBARI_SERVER",
-              ambariServerHostname, // TODO: Choose a random hostname from the cluster. All tasks for the AMBARI_SERVER service will be executed on this Ambari server
-              System.currentTimeMillis());
-          RoleCommandOrder roleCommandOrder = ambariManagementController.getRoleCommandOrder(cluster);
-
-          // If a RequestStageContainer does not already exist, create a new one...
-          if (requestStageContainer == null) {
-            requestStageContainer = new RequestStageContainer(
-                actionManager.getNextRequestId(),
-                null,
-                requestFactory,
-                actionManager);
-          }
-
-          // Use the handler implementation to setup the relevant stages.
-          handler.createStages(cluster, hosts, Collections.<String, Map<String, String>>emptyMap(),
-              clusterHostInfoJson, hostParamsJson, event, roleCommandOrder, kerberosDetails,
-              dataDirectory, requestStageContainer, serviceComponentHostsToProcess, hostsWithValidKerberosClient);
-
-
-          handler.addFinalizeOperationStage(cluster, clusterHostInfoJson, hostParamsJson, event,
-              dataDirectory, roleCommandOrder, requestStageContainer);
-        }
-      }
-    }
-
-    return requestStageContainer;
-  }
-
-
-  /**
-   * Gathers the Kerberos-related data from configurations and stores it in a new KerberosDetails
-   * instance.
-   *
-   * @param cluster          the relevant Cluster
-   * @param manageIdentities a Boolean value indicating how to override the configured behavior
-   *                         of managing Kerberos identities; if null the configured behavior
-   *                         will not be overridden
-   * @return a new KerberosDetails with the collected configuration data
-   * @throws AmbariException
-   */
-  private KerberosDetails getKerberosDetails(Cluster cluster, Boolean manageIdentities)
-      throws KerberosInvalidConfigurationException, AmbariException {
-
-    KerberosDetails kerberosDetails = new KerberosDetails();
-
-    if (cluster == null) {
-      String message = "The cluster object is not available";
-      LOG.error(message);
-      throw new AmbariException(message);
-    }
-
-    Config configKrb5Conf = cluster.getDesiredConfigByType("krb5-conf");
-    if (configKrb5Conf == null) {
-      String message = "The 'krb5-conf' configuration is not available";
-      LOG.error(message);
-      throw new AmbariException(message);
-    }
-
-    Map<String, String> krb5ConfProperties = configKrb5Conf.getProperties();
-    if (krb5ConfProperties == null) {
-      String message = "The 'krb5-conf' configuration properties are not available";
-      LOG.error(message);
-      throw new AmbariException(message);
-    }
-
-    Config configKerberosEnv = cluster.getDesiredConfigByType("kerberos-env");
-    if (configKerberosEnv == null) {
-      String message = "The 'kerberos-env' configuration is not available";
-      LOG.error(message);
-      throw new AmbariException(message);
-    }
-
-    Map<String, String> kerberosEnvProperties = configKerberosEnv.getProperties();
-    if (kerberosEnvProperties == null) {
-      String message = "The 'kerberos-env' configuration properties are not available";
-      LOG.error(message);
-      throw new AmbariException(message);
-    }
-
-    kerberosDetails.setSecurityType(cluster.getSecurityType());
-    kerberosDetails.setDefaultRealm(kerberosEnvProperties.get("realm"));
-
-    kerberosDetails.setKerberosEnvProperties(kerberosEnvProperties);
-
-    // If set, override the manage identities behavior
-    kerberosDetails.setManageIdentities(manageIdentities);
-
-    String kdcTypeProperty = kerberosEnvProperties.get("kdc_type");
-    if ((kdcTypeProperty == null) && kerberosDetails.manageIdentities()) {
-      String message = "The 'kerberos-env/kdc_type' value must be set to a valid KDC type";
-      LOG.error(message);
-      throw new KerberosInvalidConfigurationException(message);
-    }
+  void setAuthToLocalRules(KerberosDescriptor kerberosDescriptor, Cluster cluster, String realm,
+                           Map<String, Map<String, String>> existingConfigurations,
+                           Map<String, Map<String, String>> kerberosConfigurations)
+      throws AmbariException;
 
-    KDCType kdcType;
-    try {
-      kdcType = KDCType.translate(kdcTypeProperty);
-    } catch (IllegalArgumentException e) {
-      String message = String.format("Invalid 'kdc_type' value: %s", kdcTypeProperty);
-      LOG.error(message);
-      throw new AmbariException(message);
-    }
+  List<ServiceComponentHost> getServiceComponentHostsToProcess(Cluster cluster,
+                                                               KerberosDescriptor kerberosDescriptor,
+                                                               Map<String, ? extends Collection<String>> serviceComponentFilter,
+                                                               Collection<String> identityFilter,
+                                                               Command<Boolean, ServiceComponentHost> shouldProcessCommand)
+      throws AmbariException;
 
-    // Set the KDCType to the the MIT_KDC as a fallback.
-    kerberosDetails.setKdcType((kdcType == null) ? KDCType.MIT_KDC : kdcType);
-
-    return kerberosDetails;
-  }
+  Set<String> getHostsWithValidKerberosClient(Cluster cluster) throws AmbariException;
 
   /**
    * Builds a composite Kerberos descriptor using the default Kerberos descriptor and a user-specified
@@ -1270,141 +289,13 @@ public class KerberosHelper {
    * @return the kerberos descriptor associated with the specified cluster
    * @throws AmbariException if unable to obtain the descriptor
    */
-  private KerberosDescriptor getKerberosDescriptor(Cluster cluster) throws AmbariException {
-    StackId stackId = cluster.getCurrentStackVersion();
-
-    // -------------------------------
-    // Get the default Kerberos descriptor from the stack, which is the same as the value from
-    // stacks/:stackName/versions/:version/artifacts/kerberos_descriptor
-    KerberosDescriptor defaultDescriptor = ambariMetaInfo.getKerberosDescriptor(stackId.getStackName(), stackId.getStackVersion());
-    // -------------------------------
-
-    // Get the user-supplied Kerberos descriptor from cluster/:clusterName/artifacts/kerberos_descriptor
-    KerberosDescriptor descriptor = null;
-
-    PredicateBuilder pb = new PredicateBuilder();
-    Predicate predicate = pb.begin().property("Artifacts/cluster_name").equals(cluster.getClusterName()).and().
-        property(ArtifactResourceProvider.ARTIFACT_NAME_PROPERTY).equals("kerberos_descriptor").
-        end().toPredicate();
-
-    synchronized (KerberosHelper.class) {
-      if (clusterController == null) {
-        clusterController = ClusterControllerHelper.getClusterController();
-      }
-    }
-
-    ResourceProvider artifactProvider =
-        clusterController.ensureResourceProvider(Resource.Type.Artifact);
-
-    Request request = new RequestImpl(Collections.<String>emptySet(),
-        Collections.<Map<String, Object>>emptySet(), Collections.<String, String>emptyMap(), null);
-
-    Set<Resource> response = null;
-    try {
-      response = artifactProvider.getResources(request, predicate);
-    } catch (SystemException e) {
-      e.printStackTrace();
-      throw new AmbariException("An unknown error occurred while trying to obtain the cluster kerberos descriptor", e);
-    } catch (UnsupportedPropertyException e) {
-      e.printStackTrace();
-      throw new AmbariException("An unknown error occurred while trying to obtain the cluster kerberos descriptor", e);
-    } catch (NoSuchParentResourceException e) {
-      // parent cluster doesn't exist.  shouldn't happen since we have the cluster instance
-      e.printStackTrace();
-      throw new AmbariException("An unknown error occurred while trying to obtain the cluster kerberos descriptor", e);
-    } catch (NoSuchResourceException e) {
-      // no descriptor registered, use the default from the stack
-    }
-
-    if (response != null && !response.isEmpty()) {
-      Resource descriptorResource = response.iterator().next();
-      Map<String, Map<String, Object>> propertyMap = descriptorResource.getPropertiesMap();
-      if (propertyMap != null) {
-        Map<String, Object> artifactData = propertyMap.get(ArtifactResourceProvider.ARTIFACT_DATA_PROPERTY);
-        Map<String, Object> artifactDataProperties = propertyMap.get(ArtifactResourceProvider.ARTIFACT_DATA_PROPERTY + "/properties");
-        HashMap<String, Object> data = new HashMap<String, Object>();
-
-        if (artifactData != null) {
-          data.putAll(artifactData);
-        }
-
-        if (artifactDataProperties != null) {
-          data.put("properties", artifactDataProperties);
-        }
-
-        descriptor = kerberosDescriptorFactory.createInstance(data);
-      }
-    }
-    // -------------------------------
-
-    // -------------------------------
-    // Attempt to build and return a composite of the default Kerberos descriptor and the user-supplied
-    // Kerberos descriptor. If the default descriptor exists, overlay the user-supplied Kerberos
-    // descriptor on top of it (if it exists) and return the composite; else return the user-supplied
-    // Kerberos descriptor. If both values are null, null may be returned.
-    if (defaultDescriptor == null) {
-      return descriptor;
-    } else {
-      if (descriptor != null) {
-        defaultDescriptor.update(descriptor);
-      }
-      return defaultDescriptor;
-    }
-    // -------------------------------
-  }
-
-  /**
-   * Creates a temporary directory within the system temporary directory
-   * <p/>
-   * The resulting directory is to be removed by the caller when desired.
-   *
-   * @return a File pointing to the new temporary directory, or null if one was not created
-   * @throws AmbariException if a new temporary directory cannot be created
-   */
-  private File createTemporaryDirectory() throws AmbariException {
-    String tempDirectoryPath = configuration.getProperty(Configuration.SERVER_TMP_DIR_KEY);
-
-    if ((tempDirectoryPath == null) || tempDirectoryPath.isEmpty()) {
-      tempDirectoryPath = System.getProperty("java.io.tmpdir");
-    }
-
-    try {
-      if (tempDirectoryPath == null) {
-        throw new IOException("The System property 'java.io.tmpdir' does not specify a temporary directory");
-      }
-
-      File directory;
-      int tries = 0;
-      long now = System.currentTimeMillis();
-
-      do {
-        directory = new File(tempDirectoryPath, String.format("%s%d-%d.d",
-            KerberosServerAction.DATA_DIRECTORY_PREFIX, now, tries));
-
-        if ((directory.exists()) || !directory.mkdirs()) {
-          directory = null; // Rest and try again...
-        } else {
-          LOG.debug("Created temporary directory: {}", directory.getAbsolutePath());
-        }
-      } while ((directory == null) && (++tries < 100));
-
-      if (directory == null) {
-        throw new IOException(String.format("Failed to create a temporary directory in %s", tempDirectoryPath));
-      }
-
-      return directory;
-    } catch (IOException e) {
-      String message = "Failed to create the temporary data directory.";
-      LOG.error(message, e);
-      throw new AmbariException(message, e);
-    }
-  }
+  KerberosDescriptor getKerberosDescriptor(Cluster cluster) throws AmbariException;
 
   /**
    * Merges configuration from a Map of configuration updates into a main configurations Map.  Each
    * property in the updates Map is processed to replace variables using the replacement Map.
    * <p/>
-   * See {@link VariableReplacementHelper#replaceVariables(String, Map)}
+   * See {@link org.apache.ambari.server.state.kerberos.VariableReplacementHelper#replaceVariables(String, java.util.Map)}
    * for information on variable replacement.
    *
    * @param configurations a Map of configurations
@@ -1413,212 +304,37 @@ public class KerberosHelper {
    * @return the merged Map
    * @throws AmbariException
    */
-  private Map<String, Map<String, String>> mergeConfigurations(Map<String, Map<String, String>> configurations,
-                                                               Map<String, KerberosConfigurationDescriptor> updates,
-                                                               Map<String, Map<String, String>> replacements)
-      throws AmbariException {
-
-    if ((updates != null) && !updates.isEmpty()) {
-      if (configurations == null) {
-        configurations = new HashMap<String, Map<String, String>>();
-      }
-
-      for (Map.Entry<String, KerberosConfigurationDescriptor> entry : updates.entrySet()) {
-        String type = entry.getKey();
-        KerberosConfigurationDescriptor configurationDescriptor = entry.getValue();
-
-        if (configurationDescriptor != null) {
-          Map<String, String> updatedProperties = configurationDescriptor.getProperties();
-          mergeConfigurations(configurations, type, updatedProperties, replacements);
-        }
-      }
-    }
-
-    return configurations;
-  }
+  Map<String, Map<String, String>> mergeConfigurations(Map<String, Map<String, String>> configurations,
+                                                       Map<String, KerberosConfigurationDescriptor> updates,
+                                                       Map<String, Map<String, String>> replacements)
+      throws AmbariException;
 
   /**
-   * Merges the specified configuration property in a map of configuration types.
-   * The supplied property is processed to replace variables using the replacement Map.
-   * <p/>
-   * See {@link VariableReplacementHelper#replaceVariables(String, Map)}
-   * for information on variable replacement.
+   * Adds identities to the KerberosIdentityDataFileWriter.
    *
-   * @param configurations             the Map of configuration types to update
-   * @param configurationSpecification the config-type/property_name value specifying the property to set
-   * @param value                      the value of the property to set
-   * @param replacements               a Map of (grouped) replacement values
-   * @throws AmbariException
+   * @param kerberosIdentityDataFileWriter a KerberosIdentityDataFileWriter to use for storing identity
+   *                                       records
+   * @param identities                     a List of KerberosIdentityDescriptors to add to the data
+   *                                       file
+   * @param identityFilter                 a Collection of identity names indicating the relevant identities -
+   *                                       if null, no filter is relevant; if empty, the filter indicates no
+   *                                       relevant identities
+   * @param hostname                       the relevant hostname
+   * @param serviceName                    the relevant service name
+   * @param componentName                  the relevant component name
+   * @param kerberosConfigurations         a map of the configurations to update with identity-specific
+   *                                       values
+   * @param configurations                 a Map of configurations to use a replacements for variables
+   *                                       in identity fields
+   * @return an integer indicating the number of identities added to the data file
+   * @throws java.io.IOException if an error occurs while writing a record to the data file
    */
-  private void mergeConfiguration(Map<String, Map<String, String>> configurations,
-                                  String configurationSpecification,
-                                  String value,
-                                  Map<String, Map<String, String>> replacements) throws AmbariException {
-
-    if (configurationSpecification != null) {
-      String[] parts = configurationSpecification.split("/");
-      if (parts.length == 2) {
-        String type = parts[0];
-        String property = parts[1];
-
-        mergeConfigurations(configurations, type, Collections.singletonMap(property, value), replacements);
-      }
-    }
-  }
-
-  /**
-   * Merges configuration from a Map of configuration updates into a main configurations Map.  Each
-   * property in the updates Map is processed to replace variables using the replacement Map.
-   * <p/>
-   * See {@link VariableReplacementHelper#replaceVariables(String, Map)}
-   * for information on variable replacement.
-   *
-   * @param configurations a Map of configurations
-   * @param type           the configuration type
-   * @param updates        a Map of property updates
-   * @param replacements   a Map of (grouped) replacement values
-   * @throws AmbariException
-   */
-  private void mergeConfigurations(Map<String, Map<String, String>> configurations, String type,
-                                   Map<String, String> updates,
-                                   Map<String, Map<String, String>> replacements) throws AmbariException {
-    if (updates != null) {
-      Map<String, String> existingProperties = configurations.get(type);
-      if (existingProperties == null) {
-        existingProperties = new HashMap<String, String>();
-        configurations.put(type, existingProperties);
-      }
-
-      for (Map.Entry<String, String> property : updates.entrySet()) {
-        existingProperties.put(
-            variableReplacementHelper.replaceVariables(property.getKey(), replacements),
-            variableReplacementHelper.replaceVariables(property.getValue(), replacements)
-        );
-      }
-    }
-  }
-
-  /**
-   * Adds identities to the KerberosIdentityDataFileWriter.
-   *
-   * @param kerberosIdentityDataFileWriter a KerberosIdentityDataFileWriter to use for storing identity
-   *                                        records
-   * @param identities                      a List of KerberosIdentityDescriptors to add to the data
-   *                                        file
-   * @param identityFilter                  a Collection of identity names indicating the relevant identities -
-   *                                        if null, no filter is relevant; if empty, the filter indicates no
-   *                                        relevant identities
-   * @param hostname                        the relevant hostname
-   * @param serviceName                     the relevant service name
-   * @param componentName                   the relevant component name
-   * @param kerberosConfigurations          a map of the configurations to update with identity-specific
-   *                                        values
-   * @param configurations                  a Map of configurations to use a replacements for variables
-   *                                        in identity fields
-   * @return an integer indicating the number of identities added to the data file
-   * @throws java.io.IOException if an error occurs while writing a record to the data file
-   */
-  private int addIdentities(KerberosIdentityDataFileWriter kerberosIdentityDataFileWriter,
-                            Collection<KerberosIdentityDescriptor> identities,
-                            Collection<String> identityFilter, String hostname, String serviceName,
-                            String componentName, Map<String, Map<String, String>> kerberosConfigurations,
-                            Map<String, Map<String, String>> configurations)
-      throws IOException {
-    int identitiesAdded = 0;
-
-    if (identities != null) {
-      for (KerberosIdentityDescriptor identity : identities) {
-        // If there is no filter or the filter contains the current identity's name...
-        if ((identityFilter == null) || identityFilter.contains(identity.getName())) {
-          KerberosPrincipalDescriptor principalDescriptor = identity.getPrincipalDescriptor();
-          String principal = null;
-          String principalType = null;
-          String principalConfiguration = null;
-
-          if (principalDescriptor != null) {
-            principal = variableReplacementHelper.replaceVariables(principalDescriptor.getValue(), configurations);
-            principalType = principalDescriptor.getType().name().toLowerCase();
-            principalConfiguration = variableReplacementHelper.replaceVariables(principalDescriptor.getConfiguration(), configurations);
-          }
-
-          if (principal != null) {
-            KerberosKeytabDescriptor keytabDescriptor = identity.getKeytabDescriptor();
-            String keytabFilePath = null;
-            String keytabFileOwnerName = null;
-            String keytabFileOwnerAccess = null;
-            String keytabFileGroupName = null;
-            String keytabFileGroupAccess = null;
-            String keytabFileConfiguration = null;
-            boolean keytabIsCachable = false;
-
-            if (keytabDescriptor != null) {
-              keytabFilePath = variableReplacementHelper.replaceVariables(keytabDescriptor.getFile(), configurations);
-              keytabFileOwnerName = variableReplacementHelper.replaceVariables(keytabDescriptor.getOwnerName(), configurations);
-              keytabFileOwnerAccess = variableReplacementHelper.replaceVariables(keytabDescriptor.getOwnerAccess(), configurations);
-              keytabFileGroupName = variableReplacementHelper.replaceVariables(keytabDescriptor.getGroupName(), configurations);
-              keytabFileGroupAccess = variableReplacementHelper.replaceVariables(keytabDescriptor.getGroupAccess(), configurations);
-              keytabFileConfiguration = variableReplacementHelper.replaceVariables(keytabDescriptor.getConfiguration(), configurations);
-              keytabIsCachable = keytabDescriptor.isCachable();
-            }
-
-            // Append an entry to the action data file builder...
-            kerberosIdentityDataFileWriter.writeRecord(
-                hostname,
-                serviceName,
-                componentName,
-                principal,
-                principalType,
-                keytabFilePath,
-                keytabFileOwnerName,
-                keytabFileOwnerAccess,
-                keytabFileGroupName,
-                keytabFileGroupAccess,
-                (keytabIsCachable) ? "true" : "false");
-
-            // Add the principal-related configuration to the map of configurations
-            mergeConfiguration(kerberosConfigurations, principalConfiguration, principal, null);
-
-            // Add the keytab-related configuration to the map of configurations
-            mergeConfiguration(kerberosConfigurations, keytabFileConfiguration, keytabFilePath, null);
-
-            identitiesAdded++;
-          }
-        }
-      }
-    }
-
-    return identitiesAdded;
-  }
-
-  /**
-   * Adds identities to the AuthToLocalBuilder.
-   *
-   * @param authToLocalBuilder the AuthToLocalBuilder to use to build the auth_to_local mapping
-   * @param identities         a List of KerberosIdentityDescriptors to process
-   * @param identityFilter     a Collection of identity names indicating the relevant identities -
-   *                           if null, no filter is relevant; if empty, the filter indicates no
-   *                           relevant identities
-   * @param configurations     a Map of configurations to use a replacements for variables
-   *                           in identity fields
-   * @throws org.apache.ambari.server.AmbariException
-   */
-  private void addIdentities(AuthToLocalBuilder authToLocalBuilder,
-                             List<KerberosIdentityDescriptor> identities, Collection<String> identityFilter,
-                             Map<String, Map<String, String>> configurations) throws AmbariException {
-    if (identities != null) {
-      for (KerberosIdentityDescriptor identity : identities) {
-        // If there is no filter or the filter contains the current identity's name...
-        if ((identityFilter == null) || identityFilter.contains(identity.getName())) {
-          KerberosPrincipalDescriptor principalDescriptor = identity.getPrincipalDescriptor();
-          if (principalDescriptor != null) {
-            authToLocalBuilder.addRule(
-                variableReplacementHelper.replaceVariables(principalDescriptor.getValue(), configurations),
-                variableReplacementHelper.replaceVariables(principalDescriptor.getLocalUsername(), configurations));
-          }
-        }
-      }
-    }
-  }
+  int addIdentities(KerberosIdentityDataFileWriter kerberosIdentityDataFileWriter,
+                    Collection<KerberosIdentityDescriptor> identities,
+                    Collection<String> identityFilter, String hostname, String serviceName,
+                    String componentName, Map<String, Map<String, String>> kerberosConfigurations,
+                    Map<String, Map<String, String>> configurations)
+      throws IOException;
 
   /**
    * Calculates the map of configurations relative to the cluster and host.
@@ -1631,267 +347,9 @@ public class KerberosHelper {
    * @return a Map of calculated configuration types
    * @throws AmbariException
    */
-  private Map<String, Map<String, String>> calculateConfigurations(Cluster cluster, String hostname,
-                                                                   Map<String, String> kerberosDescriptorProperties)
-      throws AmbariException {
-    // For a configuration type, both tag and an actual configuration can be stored
-    // Configurations from the tag is always expanded and then over-written by the actual
-    // global:version1:{a1:A1,b1:B1,d1:D1} + global:{a1:A2,c1:C1,DELETED_d1:x} ==>
-    // global:{a1:A2,b1:B1,c1:C1}
-    Map<String, Map<String, String>> configurations = new HashMap<String, Map<String, String>>();
-    Map<String, Map<String, String>> configurationTags = ambariManagementController.findConfigurationTagsWithOverrides(cluster, hostname);
-
-    if (configurationTags.get(Configuration.GLOBAL_CONFIG_TAG) != null) {
-      configHelper.applyCustomConfig(
-          configurations, Configuration.GLOBAL_CONFIG_TAG,
-          Configuration.RCA_ENABLED_PROPERTY, "false", false);
-    }
-
-    Map<String, Map<String, String>> configProperties = configHelper.getEffectiveConfigProperties(cluster, configurationTags);
-
-    // Apply the configurations saved with the Execution Cmd on top of
-    // derived configs - This will take care of all the hacks
-    for (Map.Entry<String, Map<String, String>> entry : configProperties.entrySet()) {
-      String type = entry.getKey();
-      Map<String, String> allLevelMergedConfig = entry.getValue();
-      Map<String, String> configuration = configurations.get(type);
-
-      if (configuration == null) {
-        configuration = new HashMap<String, String>(allLevelMergedConfig);
-      } else {
-        Map<String, String> mergedConfig = configHelper.getMergedConfig(allLevelMergedConfig, configuration);
-        configuration.clear();
-        configuration.putAll(mergedConfig);
-      }
-
-      configurations.put(type, configuration);
-    }
-
-    // A map to hold un-categorized properties.  This may come from the KerberosDescriptor
-    // and will also contain a value for the current host
-    Map<String, String> generalProperties = configurations.get("");
-    if (generalProperties == null) {
-      generalProperties = new HashMap<String, String>();
-      configurations.put("", generalProperties);
-    }
-
-    // If any properties are set in the calculated KerberosDescriptor, add them into the
-    // Map of configurations as an un-categorized type (using an empty string)
-    if (kerberosDescriptorProperties != null) {
-      generalProperties.putAll(kerberosDescriptorProperties);
-    }
-
-    // Add the current hostname under "host" and "hostname"
-    generalProperties.put("host", hostname);
-    generalProperties.put("hostname", hostname);
-
-    // Add the current cluster's name
-    generalProperties.put("cluster_name", cluster.getClusterName());
-
-    // add clusterHostInfo config
-    Map<String, String> componentHosts = new HashMap<String, String>();
-    for (Map.Entry<String, Service> service : cluster.getServices().entrySet()) {
-      for (Map.Entry<String, ServiceComponent> serviceComponent : service.getValue().getServiceComponents().entrySet()) {
-        if (StageUtils.getComponentToClusterInfoKeyMap().keySet().contains(serviceComponent.getValue().getName())) {
-          componentHosts.put(StageUtils.getComponentToClusterInfoKeyMap().get(serviceComponent.getValue().getName()),
-              StringUtils.join(serviceComponent.getValue().getServiceComponentHosts().keySet(), ","));
-        }
-      }
-    }
-    configurations.put("clusterHostInfo", componentHosts);
-
-    return configurations;
-  }
-
-  /**
-   * Creates a new stage
-   *
-   * @param id              the new stage's id
-   * @param cluster         the relevant Cluster
-   * @param requestId       the relevant request Id
-   * @param requestContext  a String describing the stage
-   * @param clusterHostInfo JSON-encoded clusterHostInfo structure
-   * @param commandParams   JSON-encoded command parameters
-   * @param hostParams      JSON-encoded host parameters
-   * @return a newly created Stage
-   */
-  private Stage createNewStage(long id, Cluster cluster, long requestId,
-                               String requestContext, String clusterHostInfo,
-                               String commandParams, String hostParams) {
-    Stage stage = stageFactory.createNew(requestId,
-        BASE_LOG_DIR + File.pathSeparator + requestId,
-        cluster.getClusterName(),
-        cluster.getClusterId(),
-        requestContext,
-        clusterHostInfo,
-        commandParams,
-        hostParams);
-
-    stage.setStageId(id);
-    return stage;
-  }
-
-  /**
-   * Creates a new stage with a single task describing the ServerAction class to invoke and the other
-   * task-related information.
-   *
-   * @param id                the new stage's id
-   * @param cluster           the relevant Cluster
-   * @param requestId         the relevant request Id
-   * @param requestContext    a String describing the stage
-   * @param clusterHostInfo   JSON-encoded clusterHostInfo structure
-   * @param commandParams     JSON-encoded command parameters
-   * @param hostParams        JSON-encoded host parameters
-   * @param actionClass       The ServeAction class that implements the action to invoke
-   * @param event             The relevant ServiceComponentHostServerActionEvent
-   * @param commandParameters a Map of command parameters to attach to the task added to the new
-   *                          stage
-   * @param commandDetail     a String declaring a descriptive name to pass to the action - null or an
-   *                          empty string indicates no value is to be set
-   * @param timeout           the timeout for the task/action  @return a newly created Stage
-   */
-  private Stage createServerActionStage(long id, Cluster cluster, long requestId,
-                                        String requestContext, String clusterHostInfo,
-                                        String commandParams, String hostParams,
-                                        Class<? extends ServerAction> actionClass,
-                                        ServiceComponentHostServerActionEvent event,
-                                        Map<String, String> commandParameters, String commandDetail,
-                                        Integer timeout) throws AmbariException {
-
-    Stage stage = createNewStage(id, cluster, requestId, requestContext, clusterHostInfo, commandParams, hostParams);
-    stage.addServerActionCommand(actionClass.getName(),
-        Role.AMBA

<TRUNCATED>

[2/5] ambari git commit: AMBARI-11396. Kerberos: UI shows Kerberize Cluster step as failed with a retry button, but the backend keeps moving forward to Kerberize the cluster (rlevas)

Posted by rl...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/e3acc7f0/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/AbstractPrepareKerberosServerAction.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/AbstractPrepareKerberosServerAction.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/AbstractPrepareKerberosServerAction.java
new file mode 100644
index 0000000..b3b6b48
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/AbstractPrepareKerberosServerAction.java
@@ -0,0 +1,192 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.serveraction.kerberos;
+
+import com.google.common.reflect.TypeToken;
+import com.google.inject.Inject;
+import org.apache.ambari.server.AmbariException;
+import org.apache.ambari.server.agent.CommandReport;
+import org.apache.ambari.server.controller.KerberosHelper;
+import org.apache.ambari.server.state.Cluster;
+import org.apache.ambari.server.state.ServiceComponentHost;
+import org.apache.ambari.server.state.kerberos.KerberosComponentDescriptor;
+import org.apache.ambari.server.state.kerberos.KerberosDescriptor;
+import org.apache.ambari.server.state.kerberos.KerberosIdentityDescriptor;
+import org.apache.ambari.server.state.kerberos.KerberosServiceDescriptor;
+import org.apache.ambari.server.utils.StageUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.File;
+import java.io.IOException;
+import java.lang.reflect.Type;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+public abstract class AbstractPrepareKerberosServerAction extends KerberosServerAction {
+  private final static Logger LOG = LoggerFactory.getLogger(AbstractPrepareKerberosServerAction.class);
+
+  /**
+   * KerberosHelper
+   */
+  @Inject
+  private KerberosHelper kerberosHelper;
+
+  @Inject
+  private KerberosIdentityDataFileWriterFactory kerberosIdentityDataFileWriterFactory;
+
+  @Override
+  protected CommandReport processIdentity(Map<String, String> identityRecord, String evaluatedPrincipal, KerberosOperationHandler operationHandler, Map<String, String> kerberosConfiguration, Map<String, Object> requestSharedDataContext) throws AmbariException {
+    throw new UnsupportedOperationException();
+  }
+
+  protected void processIdentities(Cluster cluster, KerberosDescriptor kerberosDescriptor, List<ServiceComponentHost> schToProcess,
+                                   Collection<String> identityFilter, String dataDirectory,
+                                   Map<String, Map<String, String>> kerberosConfigurations) throws AmbariException {
+
+    actionLog.writeStdOut("Processing Kerberos Identities");
+
+    if (!schToProcess.isEmpty()) {
+      if(dataDirectory == null) {
+        String message = "The data directory has not been set.  Generated data can not be stored.";
+        LOG.error(message);
+        throw new AmbariException(message);
+      }
+
+      // Create the file used to store details about principals and keytabs to create
+      File identityDataFile = new File(dataDirectory, KerberosIdentityDataFileWriter.DATA_FILE_NAME);
+
+      // Group ServiceComponentHosts with their relevant hosts so we can create the relevant host-based
+      // configurations once per host, rather than for every ServiceComponentHost we encounter
+      Map<String, List<ServiceComponentHost>> hostServiceComponentHosts = new HashMap<String, List<ServiceComponentHost>>();
+      for (ServiceComponentHost sch : schToProcess) {
+        String hostName = sch.getHostName();
+        List<ServiceComponentHost> serviceComponentHosts = hostServiceComponentHosts.get(hostName);
+
+        if (serviceComponentHosts == null) {
+          serviceComponentHosts = new ArrayList<ServiceComponentHost>();
+          hostServiceComponentHosts.put(hostName, serviceComponentHosts);
+        }
+
+        serviceComponentHosts.add(sch);
+      }
+
+      Map<String, String> kerberosDescriptorProperties = kerberosDescriptor.getProperties();
+      KerberosIdentityDataFileWriter kerberosIdentityDataFileWriter = null;
+
+      try {
+        for (Map.Entry<String, List<ServiceComponentHost>> entry : hostServiceComponentHosts.entrySet()) {
+          String hostName = entry.getKey();
+          List<ServiceComponentHost> serviceComponentHosts = entry.getValue();
+
+          // Calculate the current host-specific configurations. These will be used to replace
+          // variables within the Kerberos descriptor data
+          Map<String, Map<String, String>> configurations = kerberosHelper.calculateConfigurations(cluster, hostName, kerberosDescriptorProperties);
+
+          try {
+            // Iterate over the components installed on the current host to get the service and
+            // component-level Kerberos descriptors in order to determine which principals,
+            // keytab files, and configurations need to be created or updated.
+            for (ServiceComponentHost sch : serviceComponentHosts) {
+              String serviceName = sch.getServiceName();
+              String componentName = sch.getServiceComponentName();
+
+              KerberosServiceDescriptor serviceDescriptor = kerberosDescriptor.getService(serviceName);
+
+              if (serviceDescriptor != null) {
+                List<KerberosIdentityDescriptor> serviceIdentities = serviceDescriptor.getIdentities(true);
+
+                // Lazily create the KerberosIdentityDataFileWriter instance...
+                if (kerberosIdentityDataFileWriter == null) {
+                  actionLog.writeStdOut(String.format("Writing Kerberos identity data metadata file to %s", identityDataFile.getAbsolutePath()));
+                  kerberosIdentityDataFileWriter = kerberosIdentityDataFileWriterFactory.createKerberosIdentityDataFileWriter(identityDataFile);
+                }
+
+                // Add service-level principals (and keytabs)
+                kerberosHelper.addIdentities(kerberosIdentityDataFileWriter, serviceIdentities,
+                    identityFilter, hostName, serviceName, componentName, kerberosConfigurations, configurations);
+
+                KerberosComponentDescriptor componentDescriptor = serviceDescriptor.getComponent(componentName);
+
+                if (componentDescriptor != null) {
+                  List<KerberosIdentityDescriptor> componentIdentities = componentDescriptor.getIdentities(true);
+
+                  // Calculate the set of configurations to update and replace any variables
+                  // using the previously calculated Map of configurations for the host.
+                  kerberosHelper.mergeConfigurations(kerberosConfigurations,
+                      componentDescriptor.getConfigurations(true), configurations);
+
+                  // Add component-level principals (and keytabs)
+                  kerberosHelper.addIdentities(kerberosIdentityDataFileWriter, componentIdentities,
+                      identityFilter, hostName, serviceName, componentName, kerberosConfigurations, configurations);
+                }
+              }
+            }
+          } catch (IOException e) {
+            String message = String.format("Failed to write index file - %s", identityDataFile.getAbsolutePath());
+            LOG.error(message, e);
+            actionLog.writeStdOut(message);
+            actionLog.writeStdErr(message + "\n" + e.getLocalizedMessage());
+            throw new AmbariException(message, e);
+          }
+        }
+      }
+      finally {
+        if (kerberosIdentityDataFileWriter != null) {
+          // Make sure the data file is closed
+          try {
+            kerberosIdentityDataFileWriter.close();
+          } catch (IOException e) {
+            String message = "Failed to close the index file writer";
+            LOG.warn(message, e);
+            actionLog.writeStdOut(message);
+            actionLog.writeStdErr(message + "\n" + e.getLocalizedMessage());
+          }
+        }
+      }
+    }
+  }
+
+  protected Map<String, ? extends Collection<String>> getServiceComponentFilter() {
+    String serializedValue = getCommandParameterValue(SERVICE_COMPONENT_FILTER);
+
+    if(serializedValue != null) {
+      Type type = new TypeToken<Map<String, ? extends Collection<String>>>() {}.getType();
+      return StageUtils.getGson().fromJson(serializedValue, type);
+    }
+    else {
+      return null;
+    }
+  }
+
+  protected Collection<String> getIdentityFilter() {
+    String serializedValue = getCommandParameterValue(IDENTITY_FILTER);
+
+    if(serializedValue != null) {
+      Type type = new TypeToken<Collection<String>>() {}.getType();
+      return StageUtils.getGson().fromJson(serializedValue, type);
+    }
+    else {
+      return null;
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e3acc7f0/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/CreateKeytabFilesServerAction.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/CreateKeytabFilesServerAction.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/CreateKeytabFilesServerAction.java
index 34780d6..c7123a4 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/CreateKeytabFilesServerAction.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/CreateKeytabFilesServerAction.java
@@ -150,13 +150,19 @@ public class CreateKeytabFilesServerAction extends KerberosServerAction {
 
     if (identityRecord != null) {
       String message;
+      String dataDirectory = getDataDirectoryPath();
 
       if (operationHandler == null) {
         message = String.format("Failed to create keytab file for %s, missing KerberosOperationHandler", evaluatedPrincipal);
         actionLog.writeStdErr(message);
         LOG.error(message);
         commandReport = createCommandReport(1, HostRoleStatus.FAILED, "{}", actionLog.getStdOut(), actionLog.getStdErr());
-      } else {
+      } else if (dataDirectory == null) {
+        message = "The data directory has not been set. Generated keytab files can not be stored.";
+        LOG.error(message);
+        commandReport = createCommandReport(1, HostRoleStatus.FAILED, "{}", actionLog.getStdOut(), actionLog.getStdErr());
+      }
+      else {
         Map<String, String> principalPasswordMap = getPrincipalPasswordMap(requestSharedDataContext);
         Map<String, Integer> principalKeyNumberMap = getPrincipalKeyNumberMap(requestSharedDataContext);
 
@@ -178,7 +184,7 @@ public class CreateKeytabFilesServerAction extends KerberosServerAction {
 
             // Determine where to store the keytab file.  It should go into a host-specific
             // directory under the previously determined data directory.
-            File hostDirectory = new File(getDataDirectoryPath(), hostName);
+            File hostDirectory = new File(dataDirectory, hostName);
 
             // Ensure the host directory exists...
             if (!hostDirectory.exists() && hostDirectory.mkdirs()) {

http://git-wip-us.apache.org/repos/asf/ambari/blob/e3acc7f0/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/KerberosServerAction.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/KerberosServerAction.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/KerberosServerAction.java
index 55018de..57e5527 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/KerberosServerAction.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/KerberosServerAction.java
@@ -70,6 +70,15 @@ public abstract class KerberosServerAction extends AbstractServerAction {
   public static final String DEFAULT_REALM = "default_realm";
 
   /**
+   * A (command parameter) property name used to hold the (serialized) service/component filter map.
+   */
+  public static final String SERVICE_COMPONENT_FILTER = "service_component_filter";
+
+  /**
+   * A (command parameter) property name used to hold the (serialized) identity filter list.
+   */
+  public static final String IDENTITY_FILTER = "identity_filter";
+  /**
    * A (command parameter) property name used to hold the relevant KDC type value.  See
    * {@link org.apache.ambari.server.serveraction.kerberos.KDCType} for valid values
    */

http://git-wip-us.apache.org/repos/asf/ambari/blob/e3acc7f0/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/PrepareDisableKerberosServerAction.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/PrepareDisableKerberosServerAction.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/PrepareDisableKerberosServerAction.java
new file mode 100644
index 0000000..68fcca0
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/PrepareDisableKerberosServerAction.java
@@ -0,0 +1,250 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.serveraction.kerberos;
+
+import com.google.inject.Inject;
+import org.apache.ambari.server.AmbariException;
+import org.apache.ambari.server.actionmanager.HostRoleStatus;
+import org.apache.ambari.server.agent.CommandReport;
+import org.apache.ambari.server.controller.KerberosHelper;
+import org.apache.ambari.server.state.Cluster;
+import org.apache.ambari.server.state.ConfigHelper;
+import org.apache.ambari.server.state.PropertyInfo;
+import org.apache.ambari.server.state.SecurityState;
+import org.apache.ambari.server.state.ServiceComponentHost;
+import org.apache.ambari.server.state.StackId;
+import org.apache.ambari.server.state.kerberos.KerberosDescriptor;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.ConcurrentMap;
+
+/**
+ * PrepareDisableKerberosServerAction is a ServerAction implementation that prepares metadata needed
+ * to disable Kerberos on the cluster.
+ */
+public class PrepareDisableKerberosServerAction extends AbstractPrepareKerberosServerAction {
+  private final static Logger LOG = LoggerFactory.getLogger(PrepareDisableKerberosServerAction.class);
+
+  /**
+   * KerberosHelper
+   */
+  @Inject
+  private KerberosHelper kerberosHelper;
+
+  @Inject
+  private ConfigHelper configHelper;
+
+  @Inject
+  private KerberosConfigDataFileWriterFactory kerberosConfigDataFileWriterFactory;
+
+
+  /**
+   * Called to execute this action.  Upon invocation, determines the set of relevant
+   * ServiceComponentHosts and calls the inherited
+   * {@link AbstractPrepareKerberosServerAction#processIdentities(Cluster, KerberosDescriptor, List, Collection, String, Map)}
+   * to iterate through the Kerberos identity metadata for the components being processed, then
+   * writes out the resulting configuration changes.
+   *
+   * @param requestSharedDataContext a Map to be used a shared data among all ServerActions related
+   *                                 to a given request
+   * @return a CommandReport indicating the result of this action
+   * @throws AmbariException
+   * @throws InterruptedException
+   */
+  @Override
+  public CommandReport execute(ConcurrentMap<String, Object> requestSharedDataContext) throws
+      AmbariException, InterruptedException {
+
+    Cluster cluster = getCluster();
+
+    if (cluster == null) {
+      throw new AmbariException("Missing cluster object");
+    }
+
+    KerberosDescriptor kerberosDescriptor = kerberosHelper.getKerberosDescriptor(cluster);
+    Collection<String> identityFilter = getIdentityFilter();
+    List<ServiceComponentHost> schToProcess = kerberosHelper.getServiceComponentHostsToProcess(cluster,
+        kerberosDescriptor,
+        getServiceComponentFilter(),
+        identityFilter,
+        new KerberosHelper.Command<Boolean, ServiceComponentHost>() {
+          @Override
+          public Boolean invoke(ServiceComponentHost sch) throws AmbariException {
+            return (sch.getDesiredSecurityState() == SecurityState.UNSECURED) &&  (sch.getSecurityState() != SecurityState.UNSECURED);
+          }
+        });
+
+    Map<String, Map<String, String>> kerberosConfigurations = new HashMap<String, Map<String, String>>();
+    Map<String, String> commandParameters = getCommandParameters();
+    String dataDirectory = getCommandParameterValue(commandParameters, DATA_DIRECTORY);
+
+    int schCount = schToProcess.size();
+    if (schCount == 0) {
+      actionLog.writeStdOut("There are no components to process");
+    } else if (schCount == 1) {
+      actionLog.writeStdOut(String.format("Processing %d component", schCount));
+    } else {
+      actionLog.writeStdOut(String.format("Processing %d components", schCount));
+    }
+
+    processIdentities(cluster, kerberosDescriptor, schToProcess, identityFilter, dataDirectory, kerberosConfigurations);
+
+    actionLog.writeStdOut("Determining configuration changes");
+    // Ensure the cluster-env/security_enabled flag is set properly
+    Map<String, String> clusterEnvProperties = kerberosConfigurations.get(KerberosHelper.SECURITY_ENABLED_CONFIG_TYPE);
+    if (clusterEnvProperties == null) {
+      clusterEnvProperties = new HashMap<String, String>();
+      kerberosConfigurations.put(KerberosHelper.SECURITY_ENABLED_CONFIG_TYPE, clusterEnvProperties);
+    }
+    clusterEnvProperties.put(KerberosHelper.SECURITY_ENABLED_PROPERTY_NAME, "false");
+
+    // If there are configurations to set, create a (temporary) data file to store the configuration
+    // updates and fill it will the relevant configurations.
+    if (!kerberosConfigurations.isEmpty()) {
+      if(dataDirectory == null) {
+        String message = "The data directory has not been set.  Generated data can not be stored.";
+        LOG.error(message);
+        throw new AmbariException(message);
+      }
+
+      Map<String, Collection<String>> configurationsToRemove = new HashMap<String, Collection<String>>();
+      File configFile = new File(dataDirectory, KerberosConfigDataFileWriter.DATA_FILE_NAME);
+      KerberosConfigDataFileWriter kerberosConfDataFileWriter = null;
+
+      // Fill the configurationsToRemove map with all Kerberos-related configurations.  Values
+      // needed to be kept will have new values from the stack definition and thus pruned from
+      // this map.
+      for (Map.Entry<String, Map<String, String>> entry : kerberosConfigurations.entrySet()) {
+        configurationsToRemove.put(entry.getKey(), new HashSet<String>(entry.getValue().keySet()));
+      }
+
+      // Remove cluster-env from the set of configurations to remove since it has no default set
+      // or properties and the logic below will remove all from this set - which is not desirable.
+      configurationsToRemove.remove("cluster-env");
+
+      if (!schToProcess.isEmpty()) {
+        Set<String> visitedServices = new HashSet<String>();
+
+        for (ServiceComponentHost sch : schToProcess) {
+          String serviceName = sch.getServiceName();
+
+          if (!visitedServices.contains(serviceName)) {
+            StackId stackVersion = sch.getStackVersion();
+
+            visitedServices.add(serviceName);
+
+            if (stackVersion != null) {
+              Set<PropertyInfo> serviceProperties = configHelper.getServiceProperties(stackVersion, serviceName, true);
+
+              if (serviceProperties != null) {
+                for (PropertyInfo propertyInfo : serviceProperties) {
+                  String filename = propertyInfo.getFilename();
+
+                  if (filename != null) {
+                    String type = ConfigHelper.fileNameToConfigType(filename);
+                    String propertyName = propertyInfo.getName();
+
+                    Map<String, String> kerberosConfiguration = kerberosConfigurations.get(type);
+                    if ((kerberosConfiguration != null) && (kerberosConfiguration.containsKey(propertyName))) {
+                      kerberosConfiguration.put(propertyName, propertyInfo.getValue());
+                    }
+
+                    // Remove the relevant from the set of properties (for the given type) to remove
+                    Collection<String> propertiesToRemove = configurationsToRemove.get(type);
+                    if (propertiesToRemove != null) {
+                      propertiesToRemove.remove(propertyName);
+                    }
+                  }
+                }
+              }
+            }
+          }
+        }
+      }
+
+      actionLog.writeStdOut(String.format("Writing configuration changes metadata file to %s", configFile.getAbsolutePath()));
+      try {
+        kerberosConfDataFileWriter = kerberosConfigDataFileWriterFactory.createKerberosConfigDataFileWriter(configFile);
+
+        for (Map.Entry<String, Map<String, String>> entry : kerberosConfigurations.entrySet()) {
+          String type = entry.getKey();
+          Map<String, String> properties = entry.getValue();
+          Collection<String> propertiesToRemove = configurationsToRemove.get(type);
+
+          if (properties != null) {
+            for (Map.Entry<String, String> configTypeEntry : properties.entrySet()) {
+              String propertyName = configTypeEntry.getKey();
+
+              // Ignore properties that should be removed
+              if ((propertiesToRemove == null) || !propertiesToRemove.contains(propertyName)) {
+                String value = configTypeEntry.getValue();
+                String operation = (value == null)
+                    ? KerberosConfigDataFileWriter.OPERATION_TYPE_REMOVE
+                    : KerberosConfigDataFileWriter.OPERATION_TYPE_SET;
+
+                kerberosConfDataFileWriter.addRecord(type, propertyName, value, operation);
+              }
+            }
+          }
+        }
+
+        // Declare which properties to remove from the configurations
+        for (Map.Entry<String, Collection<String>> entry : configurationsToRemove.entrySet()) {
+          String type = entry.getKey();
+          Collection<String> properties = entry.getValue();
+
+          if (properties != null) {
+            for (String propertyName : properties) {
+              kerberosConfDataFileWriter.addRecord(type, propertyName, null, KerberosConfigDataFileWriter.OPERATION_TYPE_REMOVE);
+            }
+          }
+        }
+      } catch (IOException e) {
+        String message = String.format("Failed to write kerberos configurations file - %s", configFile.getAbsolutePath());
+        LOG.error(message, e);
+        actionLog.writeStdOut(message);
+        actionLog.writeStdErr(message + "\n" + e.getLocalizedMessage());
+        throw new AmbariException(message, e);
+      } finally {
+        if (kerberosConfDataFileWriter != null) {
+          try {
+            kerberosConfDataFileWriter.close();
+          } catch (IOException e) {
+            String message = "Failed to close the kerberos configurations file writer";
+            LOG.warn(message, e);
+            actionLog.writeStdOut(message);
+            actionLog.writeStdErr(message + "\n" + e.getLocalizedMessage());
+          }
+        }
+      }
+    }
+
+    return createCommandReport(0, HostRoleStatus.COMPLETED, "{}", actionLog.getStdOut(), actionLog.getStdErr());
+  }
+}
+

http://git-wip-us.apache.org/repos/asf/ambari/blob/e3acc7f0/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/PrepareEnableKerberosServerAction.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/PrepareEnableKerberosServerAction.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/PrepareEnableKerberosServerAction.java
new file mode 100644
index 0000000..2295eeb
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/PrepareEnableKerberosServerAction.java
@@ -0,0 +1,183 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.serveraction.kerberos;
+
+import com.google.inject.Inject;
+import org.apache.ambari.server.AmbariException;
+import org.apache.ambari.server.actionmanager.HostRoleStatus;
+import org.apache.ambari.server.agent.CommandReport;
+import org.apache.ambari.server.controller.KerberosHelper;
+import org.apache.ambari.server.state.Cluster;
+import org.apache.ambari.server.state.SecurityState;
+import org.apache.ambari.server.state.ServiceComponentHost;
+import org.apache.ambari.server.state.kerberos.KerberosDescriptor;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.ConcurrentMap;
+
+/**
+ * PrepareEnableKerberosServerAction is a ServerAction implementation that prepares metadata needed
+ * to enable Kerberos on the cluster.
+ */
+public class PrepareEnableKerberosServerAction extends AbstractPrepareKerberosServerAction {
+  private final static Logger LOG = LoggerFactory.getLogger(PrepareEnableKerberosServerAction.class);
+
+  /**
+   * KerberosHelper
+   */
+  @Inject
+  private KerberosHelper kerberosHelper;
+
+  @Inject
+  private KerberosConfigDataFileWriterFactory kerberosConfigDataFileWriterFactory;
+
+
+  /**
+   * Called to execute this action.  Upon invocation, calls
+   * {@link KerberosServerAction#processIdentities(Map)}
+   * to iterate through the Kerberos identity metadata and call
+   * {@link PrepareEnableKerberosServerAction#processIdentities(Map)}
+   * for each identity to process.
+   *
+   * @param requestSharedDataContext a Map to be used a shared data among all ServerActions related
+   *                                 to a given request
+   * @return a CommandReport indicating the result of this action
+   * @throws AmbariException
+   * @throws InterruptedException
+   */
+  @Override
+  public CommandReport execute(ConcurrentMap<String, Object> requestSharedDataContext) throws
+      AmbariException, InterruptedException {
+
+    Cluster cluster = getCluster();
+
+    if (cluster == null) {
+      throw new AmbariException("Missing cluster object");
+    }
+
+    KerberosDescriptor kerberosDescriptor = kerberosHelper.getKerberosDescriptor(cluster);
+    Collection<String> identityFilter = getIdentityFilter();
+    List<ServiceComponentHost> schToProcess = kerberosHelper.getServiceComponentHostsToProcess(cluster,
+        kerberosDescriptor,
+        getServiceComponentFilter(),
+        identityFilter,
+        new KerberosHelper.Command<Boolean, ServiceComponentHost>() {
+          @Override
+          public Boolean invoke(ServiceComponentHost sch) throws AmbariException {
+            return (sch.getDesiredSecurityState() == SecurityState.SECURED_KERBEROS) && (sch.getSecurityState() != SecurityState.SECURED_KERBEROS);
+          }
+        });
+
+    Map<String, String> commandParameters = getCommandParameters();
+    String dataDirectory = getCommandParameterValue(commandParameters, DATA_DIRECTORY);
+    Map<String, Map<String, String>> kerberosConfigurations = new HashMap<String, Map<String, String>>();
+
+    int schCount = schToProcess.size();
+    if (schCount == 0) {
+      actionLog.writeStdOut("There are no components to process");
+    } else if (schCount == 1) {
+      actionLog.writeStdOut(String.format("Processing %d component", schCount));
+    } else {
+      actionLog.writeStdOut(String.format("Processing %d components", schCount));
+    }
+
+    processIdentities(cluster, kerberosDescriptor, schToProcess, identityFilter, dataDirectory, kerberosConfigurations);
+
+    if (!schToProcess.isEmpty()) {
+      actionLog.writeStdOut("Creating auth-to-local rules");
+      kerberosHelper.setAuthToLocalRules(kerberosDescriptor, cluster, getDefaultRealm(commandParameters),
+          kerberosHelper.calculateConfigurations(cluster, null, kerberosDescriptor.getProperties()),
+          kerberosConfigurations);
+    }
+
+
+    actionLog.writeStdOut("Determining configuration changes");
+    // Ensure the cluster-env/security_enabled flag is set properly
+    Map<String, String> clusterEnvProperties = kerberosConfigurations.get(KerberosHelper.SECURITY_ENABLED_CONFIG_TYPE);
+    if (clusterEnvProperties == null) {
+      clusterEnvProperties = new HashMap<String, String>();
+      kerberosConfigurations.put(KerberosHelper.SECURITY_ENABLED_CONFIG_TYPE, clusterEnvProperties);
+    }
+    clusterEnvProperties.put(KerberosHelper.SECURITY_ENABLED_PROPERTY_NAME, "true");
+
+    // If there are configurations to set, create a (temporary) data file to store the configuration
+    // updates and fill it will the relevant configurations.
+    if (!kerberosConfigurations.isEmpty()) {
+      if(dataDirectory == null) {
+        String message = "The data directory has not been set.  Generated data can not be stored.";
+        LOG.error(message);
+        throw new AmbariException(message);
+      }
+
+      File configFile = new File(dataDirectory, KerberosConfigDataFileWriter.DATA_FILE_NAME);
+      KerberosConfigDataFileWriter kerberosConfDataFileWriter = null;
+
+      actionLog.writeStdOut(String.format("Writing configuration changes metadata file to %s", configFile.getAbsolutePath()));
+      try {
+        kerberosConfDataFileWriter = kerberosConfigDataFileWriterFactory.createKerberosConfigDataFileWriter(configFile);
+
+        for (Map.Entry<String, Map<String, String>> entry : kerberosConfigurations.entrySet()) {
+          String type = entry.getKey();
+          Map<String, String> properties = entry.getValue();
+
+          if (properties != null) {
+            for (Map.Entry<String, String> configTypeEntry : properties.entrySet()) {
+              kerberosConfDataFileWriter.addRecord(type,
+                  configTypeEntry.getKey(),
+                  configTypeEntry.getValue(),
+                  KerberosConfigDataFileWriter.OPERATION_TYPE_SET);
+            }
+          }
+        }
+      } catch (IOException e) {
+        String message = String.format("Failed to write kerberos configurations file - %s", configFile.getAbsolutePath());
+        LOG.error(message, e);
+        actionLog.writeStdOut(message);
+        actionLog.writeStdErr(message + "\n" + e.getLocalizedMessage());
+        throw new AmbariException(message, e);
+      } finally {
+        if (kerberosConfDataFileWriter != null) {
+          try {
+            kerberosConfDataFileWriter.close();
+          } catch (IOException e) {
+            String message = "Failed to close the kerberos configurations file writer";
+            LOG.warn(message, e);
+            actionLog.writeStdOut(message);
+            actionLog.writeStdErr(message + "\n" + e.getLocalizedMessage());
+          }
+        }
+      }
+    }
+
+    return createCommandReport(0, HostRoleStatus.COMPLETED, "{}", actionLog.getStdOut(), actionLog.getStdErr());
+  }
+
+  @Override
+  protected CommandReport processIdentity(Map<String, String> identityRecord, String evaluatedPrincipal, KerberosOperationHandler operationHandler, Map<String, String> kerberosConfiguration, Map<String, Object> requestSharedDataContext) throws AmbariException {
+    throw new UnsupportedOperationException();
+  }
+}
+

http://git-wip-us.apache.org/repos/asf/ambari/blob/e3acc7f0/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/PrepareKerberosIdentitiesServerAction.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/PrepareKerberosIdentitiesServerAction.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/PrepareKerberosIdentitiesServerAction.java
new file mode 100644
index 0000000..e39d868
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/PrepareKerberosIdentitiesServerAction.java
@@ -0,0 +1,111 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.serveraction.kerberos;
+
+import com.google.inject.Inject;
+import org.apache.ambari.server.AmbariException;
+import org.apache.ambari.server.actionmanager.HostRoleStatus;
+import org.apache.ambari.server.agent.CommandReport;
+import org.apache.ambari.server.controller.KerberosHelper;
+import org.apache.ambari.server.state.Cluster;
+import org.apache.ambari.server.state.ServiceComponentHost;
+import org.apache.ambari.server.state.kerberos.KerberosDescriptor;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.ConcurrentMap;
+
+/**
+ * PrepareKerberosIdentitiesServerAction is a ServerAction implementation that prepares metadata needed
+ * to process Kerberos identities (principals and keytabs files).
+ */
+public class PrepareKerberosIdentitiesServerAction extends AbstractPrepareKerberosServerAction {
+  private final static Logger LOG = LoggerFactory.getLogger(PrepareKerberosIdentitiesServerAction.class);
+
+  /**
+   * KerberosHelper
+   */
+  @Inject
+  private KerberosHelper kerberosHelper;
+
+  /**
+   * Called to execute this action.  Upon invocation, calls
+   * {@link KerberosServerAction#processIdentities(Map)}
+   * to iterate through the Kerberos identity metadata and call
+   * {@link PrepareKerberosIdentitiesServerAction#processIdentities(Map)}
+   * for each identity to process.
+   *
+   * @param requestSharedDataContext a Map to be used a shared data among all ServerActions related
+   *                                 to a given request
+   * @return a CommandReport indicating the result of this action
+   * @throws AmbariException
+   * @throws InterruptedException
+   */
+  @Override
+  public CommandReport execute(ConcurrentMap<String, Object> requestSharedDataContext) throws
+      AmbariException, InterruptedException {
+
+    Cluster cluster = getCluster();
+
+    if (cluster == null) {
+      throw new AmbariException("Missing cluster object");
+    }
+
+    KerberosDescriptor kerberosDescriptor = kerberosHelper.getKerberosDescriptor(cluster);
+    Collection<String> identityFilter = getIdentityFilter();
+    List<ServiceComponentHost> schToProcess = kerberosHelper.getServiceComponentHostsToProcess(cluster,
+        kerberosDescriptor,
+        getServiceComponentFilter(),
+        identityFilter,
+        new KerberosHelper.Command<Boolean, ServiceComponentHost>() {
+          @Override
+          public Boolean invoke(ServiceComponentHost sch) throws AmbariException {
+            return true;
+          }
+        });
+
+    Map<String, String> commandParameters = getCommandParameters();
+    String dataDirectory = getCommandParameterValue(commandParameters, DATA_DIRECTORY);
+    Map<String, Map<String, String>> kerberosConfigurations = new HashMap<String, Map<String, String>>();
+
+    int schCount = schToProcess.size();
+    if (schCount == 0) {
+      actionLog.writeStdOut("There are no components to process");
+    } else if (schCount == 1) {
+      actionLog.writeStdOut(String.format("Processing %d component", schCount));
+    } else {
+      actionLog.writeStdOut(String.format("Processing %d components", schCount));
+    }
+
+    processIdentities(cluster, kerberosDescriptor, schToProcess, identityFilter, dataDirectory, kerberosConfigurations);
+
+
+    return createCommandReport(0, HostRoleStatus.COMPLETED, "{}", actionLog.getStdOut(), actionLog.getStdErr());
+  }
+
+  @Override
+  protected CommandReport processIdentity(Map<String, String> identityRecord, String evaluatedPrincipal, KerberosOperationHandler operationHandler, Map<String, String> kerberosConfiguration, Map<String, Object> requestSharedDataContext) throws AmbariException {
+    throw new UnsupportedOperationException();
+  }
+}
+

http://git-wip-us.apache.org/repos/asf/ambari/blob/e3acc7f0/ambari-server/src/main/java/org/apache/ambari/server/state/Cluster.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/Cluster.java b/ambari-server/src/main/java/org/apache/ambari/server/state/Cluster.java
index 10204ea..2e2cc29 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/Cluster.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/Cluster.java
@@ -81,6 +81,17 @@ public interface Cluster {
   List<ServiceComponentHost> getServiceComponentHosts(String hostname);
 
   /**
+   * Get all ServiceComponentHosts for a given service and optional component
+   *
+   * If the component name is <code>null</code>, all components for the requested service will be returned.
+   *
+   * @param serviceName the name of the desired service
+   * @param componentName the name of the desired component - null indicates all components for the service
+   * @return a list of found ServiceComponentHost instances
+   */
+  List<ServiceComponentHost> getServiceComponentHosts(String serviceName, String componentName);
+
+  /**
    * Get all hosts associated with this cluster.
    *
    * @return collection of hosts that are associated with this cluster

http://git-wip-us.apache.org/repos/asf/ambari/blob/e3acc7f0/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java
index 186963f..8fa4034 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java
@@ -797,6 +797,30 @@ public class ClusterImpl implements Cluster {
   }
 
   @Override
+  public List<ServiceComponentHost> getServiceComponentHosts(String serviceName, String componentName) {
+    ArrayList<ServiceComponentHost> foundItems = new ArrayList<ServiceComponentHost>();
+
+    loadServiceHostComponents();
+    clusterGlobalLock.readLock().lock();
+    try {
+      Map<String, Map<String, ServiceComponentHost>> foundByService = serviceComponentHosts.get(serviceName);
+      if (foundByService != null) {
+        if (componentName == null) {
+          for(Map<String, ServiceComponentHost> foundByComponent :foundByService.values()) {
+            foundItems.addAll(foundByComponent.values());
+          }
+        } else if (foundByService.containsKey(componentName)) {
+          foundItems.addAll(foundByService.get(componentName).values());
+        }
+      }
+    } finally {
+      clusterGlobalLock.readLock().unlock();
+    }
+
+    return foundItems;
+  }
+
+  @Override
   public void addService(Service service)
     throws AmbariException {
     loadServices();