Posted to commits@ambari.apache.org by rl...@apache.org on 2015/03/09 20:07:13 UTC

[1/2] ambari git commit: AMBARI-9937. Ambari must support deployment on separate host (rlevas)

Repository: ambari
Updated Branches:
  refs/heads/branch-2.0.0 ba69c1d4d -> 8b4ef2b69


http://git-wip-us.apache.org/repos/asf/ambari/blob/8b4ef2b6/ambari-server/src/test/java/org/apache/ambari/server/controller/KerberosHelperTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/KerberosHelperTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/KerberosHelperTest.java
index d766d8c..1d52681 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/KerberosHelperTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/KerberosHelperTest.java
@@ -215,10 +215,10 @@ public class KerberosHelperTest extends EasyMockSupport {
 
     final Map<String, String> kerberosEnvProperties = createNiceMock(Map.class);
     expect(kerberosEnvProperties.get("realm")).andReturn("EXAMPLE.COM").once();
+    expect(kerberosEnvProperties.get("kdc_host")).andReturn("10.0.100.1").once();
+    expect(kerberosEnvProperties.get("kadmin_host")).andReturn("10.0.100.1").once();
 
     final Map<String, String> krb5ConfProperties = createNiceMock(Map.class);
-    expect(krb5ConfProperties.get("kdc_host")).andReturn("10.0.100.1").once();
-    expect(krb5ConfProperties.get("kadmin_host")).andReturn("10.0.100.1").once();
 
     final Config krb5ConfConfig = createNiceMock(Config.class);
     expect(krb5ConfConfig.getProperties()).andReturn(krb5ConfProperties).once();
@@ -543,6 +543,7 @@ public class KerberosHelperTest extends EasyMockSupport {
     expect(keytabDescriptor1.getGroupName()).andReturn("hadoop").once();
     expect(keytabDescriptor1.getGroupAccess()).andReturn("").once();
     expect(keytabDescriptor1.getConfiguration()).andReturn("service1-site/component1.keytab.file").once();
+    expect(keytabDescriptor1.isCachable()).andReturn(false).once();
 
     final KerberosKeytabDescriptor keytabDescriptor2 = createNiceMock(KerberosKeytabDescriptor.class);
     expect(keytabDescriptor2.getFile()).andReturn("${keytab_dir}/service2.keytab").once();
@@ -551,6 +552,7 @@ public class KerberosHelperTest extends EasyMockSupport {
     expect(keytabDescriptor2.getGroupName()).andReturn("hadoop").once();
     expect(keytabDescriptor2.getGroupAccess()).andReturn("").once();
     expect(keytabDescriptor2.getConfiguration()).andReturn("service2-site/component2.keytab.file").once();
+    expect(keytabDescriptor2.isCachable()).andReturn(false).once();
 
     final KerberosIdentityDescriptor identityDescriptor1 = createNiceMock(KerberosIdentityDescriptor.class);
     expect(identityDescriptor1.getPrincipalDescriptor()).andReturn(principalDescriptor1).once();
@@ -819,6 +821,7 @@ public class KerberosHelperTest extends EasyMockSupport {
     expect(keytabDescriptor1.getGroupName()).andReturn("hadoop").once();
     expect(keytabDescriptor1.getGroupAccess()).andReturn("").once();
     expect(keytabDescriptor1.getConfiguration()).andReturn("service1-site/component1.keytab.file").once();
+    expect(keytabDescriptor1.isCachable()).andReturn(false).once();
 
     final KerberosKeytabDescriptor keytabDescriptor2 = createNiceMock(KerberosKeytabDescriptor.class);
     expect(keytabDescriptor2.getFile()).andReturn("${keytab_dir}/service2.keytab").once();
@@ -827,6 +830,7 @@ public class KerberosHelperTest extends EasyMockSupport {
     expect(keytabDescriptor2.getGroupName()).andReturn("hadoop").once();
     expect(keytabDescriptor2.getGroupAccess()).andReturn("").once();
     expect(keytabDescriptor2.getConfiguration()).andReturn("service2-site/component2.keytab.file").once();
+    expect(keytabDescriptor2.isCachable()).andReturn(false).once();
 
     final KerberosIdentityDescriptor identityDescriptor1 = createNiceMock(KerberosIdentityDescriptor.class);
     expect(identityDescriptor1.getPrincipalDescriptor()).andReturn(principalDescriptor1).once();
@@ -1106,6 +1110,7 @@ public class KerberosHelperTest extends EasyMockSupport {
     expect(keytabDescriptor1.getGroupName()).andReturn("hadoop").once();
     expect(keytabDescriptor1.getGroupAccess()).andReturn("").once();
     expect(keytabDescriptor1.getConfiguration()).andReturn("service1-site/component1.keytab.file").once();
+    expect(keytabDescriptor1.isCachable()).andReturn(false).once();
 
     final KerberosKeytabDescriptor keytabDescriptor2 = createNiceMock(KerberosKeytabDescriptor.class);
     expect(keytabDescriptor2.getFile()).andReturn("${keytab_dir}/service2.keytab").once();
@@ -1114,6 +1119,7 @@ public class KerberosHelperTest extends EasyMockSupport {
     expect(keytabDescriptor2.getGroupName()).andReturn("hadoop").once();
     expect(keytabDescriptor2.getGroupAccess()).andReturn("").once();
     expect(keytabDescriptor2.getConfiguration()).andReturn("service2-site/component2.keytab.file").once();
+    expect(keytabDescriptor2.isCachable()).andReturn(false).once();
 
     final KerberosIdentityDescriptor identityDescriptor1 = createNiceMock(KerberosIdentityDescriptor.class);
     expect(identityDescriptor1.getPrincipalDescriptor()).andReturn(principalDescriptor1).once();
@@ -1489,6 +1495,7 @@ public class KerberosHelperTest extends EasyMockSupport {
     expect(keytabDescriptor1.getGroupName()).andReturn("hadoop").times(3);
     expect(keytabDescriptor1.getGroupAccess()).andReturn("").times(3);
     expect(keytabDescriptor1.getConfiguration()).andReturn("service1-site/component1.keytab.file").times(3);
+    expect(keytabDescriptor1.isCachable()).andReturn(false).times(3);
 
     final KerberosKeytabDescriptor keytabDescriptor3 = createMock(KerberosKeytabDescriptor.class);
     expect(keytabDescriptor3.getFile()).andReturn("${keytab_dir}/service3.keytab").once();
@@ -1497,6 +1504,7 @@ public class KerberosHelperTest extends EasyMockSupport {
     expect(keytabDescriptor3.getGroupName()).andReturn("hadoop").once();
     expect(keytabDescriptor3.getGroupAccess()).andReturn("").once();
     expect(keytabDescriptor3.getConfiguration()).andReturn("service3-site/component3.keytab.file").once();
+    expect(keytabDescriptor3.isCachable()).andReturn(false).once();
 
     final KerberosIdentityDescriptor identityDescriptor1a = createMock(KerberosIdentityDescriptor.class);
     expect(identityDescriptor1a.getName()).andReturn("identity1a").anyTimes();
@@ -1773,6 +1781,7 @@ public class KerberosHelperTest extends EasyMockSupport {
     expect(keytabDescriptor1.getGroupName()).andReturn("hadoop").once();
     expect(keytabDescriptor1.getGroupAccess()).andReturn("").once();
     expect(keytabDescriptor1.getConfiguration()).andReturn("service1-site/component1.keytab.file").once();
+    expect(keytabDescriptor1.isCachable()).andReturn(false).once();
 
     final KerberosKeytabDescriptor keytabDescriptor3 = createMock(KerberosKeytabDescriptor.class);
     expect(keytabDescriptor3.getFile()).andReturn("${keytab_dir}/service3.keytab").once();
@@ -1781,6 +1790,7 @@ public class KerberosHelperTest extends EasyMockSupport {
     expect(keytabDescriptor3.getGroupName()).andReturn("hadoop").once();
     expect(keytabDescriptor3.getGroupAccess()).andReturn("").once();
     expect(keytabDescriptor3.getConfiguration()).andReturn("service3-site/component3.keytab.file").once();
+    expect(keytabDescriptor3.isCachable()).andReturn(false).once();
 
     final KerberosIdentityDescriptor identityDescriptor1a = createMock(KerberosIdentityDescriptor.class);
     expect(identityDescriptor1a.getName()).andReturn("identity1a").anyTimes();

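The hunks above make two behavioral points: KerberosHelper now reads kdc_host and kadmin_host from the kerberos-env mock rather than from krb5-conf, and every KerberosKeytabDescriptor mock must stub the new isCachable() call. A minimal, self-contained EasyMock sketch of the relocated pattern (the class wrapper and printed output are illustrative, not part of the patch):

    import static org.easymock.EasyMock.createNiceMock;
    import static org.easymock.EasyMock.expect;
    import static org.easymock.EasyMock.replay;
    import static org.easymock.EasyMock.verify;

    import java.util.Map;

    public class KerberosEnvMockSketch {
      @SuppressWarnings("unchecked")
      public static void main(String[] args) {
        // kdc_host and kadmin_host now live in kerberos-env, not krb5-conf
        Map<String, String> kerberosEnv = createNiceMock(Map.class);
        expect(kerberosEnv.get("realm")).andReturn("EXAMPLE.COM").once();
        expect(kerberosEnv.get("kdc_host")).andReturn("10.0.100.1").once();
        expect(kerberosEnv.get("kadmin_host")).andReturn("10.0.100.1").once();
        replay(kerberosEnv);

        // stand-in for the code under test consuming the configuration
        System.out.println(kerberosEnv.get("realm") + " "
            + kerberosEnv.get("kdc_host") + " "
            + kerberosEnv.get("kadmin_host"));

        verify(kerberosEnv);  // fails if an expected lookup was never made
      }
    }
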
http://git-wip-us.apache.org/repos/asf/ambari/blob/8b4ef2b6/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/ADKerberosOperationHandlerTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/ADKerberosOperationHandlerTest.java b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/ADKerberosOperationHandlerTest.java
index 2da692e..d833c35 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/ADKerberosOperationHandlerTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/ADKerberosOperationHandlerTest.java
@@ -21,7 +21,6 @@ package org.apache.ambari.server.serveraction.kerberos;
 import junit.framework.Assert;
 import org.easymock.Capture;
 import org.easymock.CaptureType;
-import org.easymock.EasyMockSupport;
 import org.easymock.IAnswer;
 import org.junit.Ignore;
 import org.junit.Test;

http://git-wip-us.apache.org/repos/asf/ambari/blob/8b4ef2b6/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/KerberosActionDataFileTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/KerberosActionDataFileTest.java b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/KerberosActionDataFileTest.java
index 25c7be7..b467760 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/KerberosActionDataFileTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/KerberosActionDataFileTest.java
@@ -50,7 +50,7 @@ public class KerberosActionDataFileTest {
           "principal" + i, "principal_type" + i, "principalConfiguration" + i, "keytabFilePath" + i,
           "keytabFileOwnerName" + i, "keytabFileOwnerAccess" + i,
           "keytabFileGroupName" + i, "keytabFileGroupAccess" + i,
-          "keytabFileConfiguration" + i);
+          "keytabFileConfiguration" + i, "false");
     }
 
     // Add some odd characters
@@ -58,7 +58,7 @@ public class KerberosActionDataFileTest {
         "principal", "principal_type", "principalConfiguration", "keytabFilePath",
         "'keytabFileOwnerName'", "<keytabFileOwnerAccess>",
         "\"keytabFileGroupName\"", "keytab,File,Group,Access",
-        "\"keytab,'File',Configuration\"");
+        "\"keytab,'File',Configuration\"", "false");
 
     builder.close();
     Assert.assertTrue(builder.isClosed());
@@ -88,6 +88,7 @@ public class KerberosActionDataFileTest {
         Assert.assertEquals("keytabFileGroupName" + i, record.get(KerberosActionDataFile.KEYTAB_FILE_GROUP_NAME));
         Assert.assertEquals("keytabFileGroupAccess" + i, record.get(KerberosActionDataFile.KEYTAB_FILE_GROUP_ACCESS));
         Assert.assertEquals("keytabFileConfiguration" + i, record.get(KerberosActionDataFile.KEYTAB_FILE_CONFIGURATION));
+        Assert.assertEquals("false", record.get(KerberosActionDataFile.KEYTAB_FILE_IS_CACHABLE));
       } else {
         Assert.assertEquals("hostName's", record.get(KerberosActionDataFile.HOSTNAME));
         Assert.assertEquals("serviceName#", record.get(KerberosActionDataFile.SERVICE));
@@ -101,6 +102,7 @@ public class KerberosActionDataFileTest {
         Assert.assertEquals("\"keytabFileGroupName\"", record.get(KerberosActionDataFile.KEYTAB_FILE_GROUP_NAME));
         Assert.assertEquals("keytab,File,Group,Access", record.get(KerberosActionDataFile.KEYTAB_FILE_GROUP_ACCESS));
         Assert.assertEquals("\"keytab,'File',Configuration\"", record.get(KerberosActionDataFile.KEYTAB_FILE_CONFIGURATION));
+        Assert.assertEquals("false", record.get(KerberosActionDataFile.KEYTAB_FILE_IS_CACHABLE));
       }
 
       i++;
@@ -155,7 +157,7 @@ public class KerberosActionDataFileTest {
         "principal","principal_type", "principalConfiguration", "keytabFilePath",
         "keytabFileOwnerName", "keytabFileOwnerAccess",
         "keytabFileGroupName", "keytabFileGroupAccess",
-        "keytabFileConfiguration");
+        "keytabFileConfiguration", "true");
 
     builder.close();
     Assert.assertTrue(builder.isClosed());
@@ -181,7 +183,7 @@ public class KerberosActionDataFileTest {
         "principal", "principal_type", "principalConfiguration", "keytabFilePath",
         "keytabFileOwnerName", "keytabFileOwnerAccess",
         "keytabFileGroupName", "keytabFileGroupAccess",
-        "keytabFileConfiguration");
+        "keytabFileConfiguration", "true");
 
     builder.close();
     Assert.assertTrue(builder.isClosed());

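The format change being tested here: each record in the Kerberos action data file gains a trailing KEYTAB_FILE_IS_CACHABLE column, carried as the string "true" or "false". A hedged round-trip sketch; the addRecord argument order mirrors this test, while the builder/reader constructor signatures, the reader class name, and its Iterable behavior are assumptions drawn from how the loop above consumes records:

    import java.io.File;
    import java.io.IOException;
    import java.util.Map;

    public class DataFileRoundTripSketch {
      public static void main(String[] args) throws IOException {
        File dataFile = new File("index.dat");  // hypothetical location

        KerberosActionDataFileBuilder builder = new KerberosActionDataFileBuilder(dataFile);
        builder.addRecord("host1", "SERVICE1", "COMPONENT1",
            "service1/host1@EXAMPLE.COM", "service", "service1-site/principal",
            "/etc/security/keytabs/service1.keytab",
            "hdfs", "r", "hadoop", "",
            "service1-site/component1.keytab.file",
            "false");  // new trailing column: may this keytab be served from a cache?
        builder.close();

        KerberosActionDataFileReader reader = new KerberosActionDataFileReader(dataFile);
        for (Map<String, String> record : reader) {
          // "false" means the keytab must be (re)generated rather than reused
          System.out.println(record.get(KerberosActionDataFile.KEYTAB_FILE_IS_CACHABLE));
        }
        reader.close();
      }
    }
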
http://git-wip-us.apache.org/repos/asf/ambari/blob/8b4ef2b6/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/KerberosOperationHandlerTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/KerberosOperationHandlerTest.java b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/KerberosOperationHandlerTest.java
index e1d5fce..18d3bee 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/KerberosOperationHandlerTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/KerberosOperationHandlerTest.java
@@ -28,7 +28,6 @@ import org.junit.Rule;
 import org.junit.Test;
 import org.junit.rules.TemporaryFolder;
 
-import javax.naming.InvalidNameException;
 import java.io.File;
 import java.io.FileInputStream;
 import java.util.Collections;
@@ -206,6 +205,29 @@ public abstract class KerberosOperationHandlerTest extends EasyMockSupport {
   }
 
   @Test
+  public void testMergeKeytabs() throws KerberosOperationException {
+    KerberosOperationHandler handler = createHandler();
+
+    Keytab keytab1 = handler.createKeytab("principal@EXAMPLE.COM", "password", 1);
+    Keytab keytab2 = handler.createKeytab("principal@EXAMPLE.COM", "password1", 1);
+    Keytab keytab3 = handler.createKeytab("principal1@EXAMPLE.COM", "password", 4);
+
+    Keytab merged;
+
+    merged = handler.mergeKeytabs(keytab1, keytab2);
+    Assert.assertEquals(keytab1.getEntries().size(), merged.getEntries().size());
+
+    merged = handler.mergeKeytabs(keytab1, keytab3);
+    Assert.assertEquals(keytab1.getEntries().size() + keytab3.getEntries().size(), merged.getEntries().size());
+
+    merged = handler.mergeKeytabs(keytab2, keytab3);
+    Assert.assertEquals(keytab2.getEntries().size() + keytab3.getEntries().size(), merged.getEntries().size());
+
+    merged = handler.mergeKeytabs(keytab2, merged);
+    Assert.assertEquals(keytab2.getEntries().size() + keytab3.getEntries().size(), merged.getEntries().size());
+  }
+
+  @Test
   public void testTranslateEncryptionTypes() throws Exception {
     KerberosOperationHandler handler = createHandler();
 

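The new testMergeKeytabs pins down the merge contract: merging two keytabs for the same principal must not duplicate entries, distinct principals accumulate, and re-merging an already-covered principal leaves the size unchanged. Below is a sketch of merge logic consistent with those assertions, written against the Apache Directory Keytab API the handlers use; the de-duplication rule is inferred from the assertions above, not copied from the production mergeKeytabs:

    import java.util.ArrayList;
    import java.util.List;

    import org.apache.directory.server.kerberos.shared.keytab.Keytab;
    import org.apache.directory.server.kerberos.shared.keytab.KeytabEntry;

    public class MergeKeytabsSketch {
      // Keep every entry from 'updates'; keep entries from 'base' only for
      // principals that 'updates' does not already cover.
      public static Keytab mergeKeytabs(Keytab base, Keytab updates) {
        List<KeytabEntry> merged = new ArrayList<KeytabEntry>(updates.getEntries());
        for (KeytabEntry entry : base.getEntries()) {
          boolean covered = false;
          for (KeytabEntry candidate : updates.getEntries()) {
            if (candidate.getPrincipalName().equals(entry.getPrincipalName())) {
              covered = true;  // 'updates' already has entries for this principal
              break;
            }
          }
          if (!covered) {
            merged.add(entry);
          }
        }
        Keytab result = Keytab.getInstance();
        result.setEntries(merged);
        return result;
      }
    }
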
http://git-wip-us.apache.org/repos/asf/ambari/blob/8b4ef2b6/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/KerberosServerActionTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/KerberosServerActionTest.java b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/KerberosServerActionTest.java
index 0b34a77..2efa640 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/KerberosServerActionTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/KerberosServerActionTest.java
@@ -105,7 +105,7 @@ public class KerberosServerActionTest {
           "principal|_HOST|_REALM" + i, "principal_type", "principalConfiguration" + i, "keytabFilePath" + i,
           "keytabFileOwnerName" + i, "keytabFileOwnerAccess" + i,
           "keytabFileGroupName" + i, "keytabFileGroupAccess" + i,
-          "keytabFileConfiguration" + i);
+          "keytabFileConfiguration" + i, "false");
     }
     builder.close();
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/8b4ef2b6/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/MITKerberosOperationHandlerTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/MITKerberosOperationHandlerTest.java b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/MITKerberosOperationHandlerTest.java
index 045cfbe..0898282 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/MITKerberosOperationHandlerTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/MITKerberosOperationHandlerTest.java
@@ -24,7 +24,6 @@ import com.google.inject.Injector;
 import junit.framework.Assert;
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.configuration.Configuration;
-import org.apache.ambari.server.controller.KerberosHelper;
 import org.apache.ambari.server.state.Clusters;
 import org.apache.ambari.server.utils.ShellCommandUtil;
 import org.easymock.EasyMock;
@@ -33,7 +32,6 @@ import org.junit.BeforeClass;
 import org.junit.Ignore;
 import org.junit.Test;
 
-import java.lang.reflect.Field;
 import java.util.HashMap;
 import java.util.Map;
 
@@ -52,6 +50,8 @@ public class MITKerberosOperationHandlerTest extends KerberosOperationHandlerTes
   private static final Map<String, String> KERBEROS_ENV_MAP = new HashMap<String, String>() {
     {
       put(MITKerberosOperationHandler.KERBEROS_ENV_ENCRYPTION_TYPES, null);
+      put(MITKerberosOperationHandler.KERBEROS_ENV_KDC_HOST, "localhost");
+      put(MITKerberosOperationHandler.KERBEROS_ENV_ADMIN_SERVER_HOST, "localhost");
     }
   };
 

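These two kerberos-env entries become mandatory test fixtures because the MIT handler now resolves the KDC and kadmin endpoints from kerberos-env instead of krb5-conf. A minimal seeding sketch using the constants this hunk introduces (how the handler consumes the values is an assumption; only the constant names come from the patch):

    import java.util.HashMap;
    import java.util.Map;

    public class MitKerberosEnvSketch {
      public static void main(String[] args) {
        Map<String, String> kerberosEnv = new HashMap<String, String>();
        kerberosEnv.put(MITKerberosOperationHandler.KERBEROS_ENV_KDC_HOST, "localhost");
        kerberosEnv.put(MITKerberosOperationHandler.KERBEROS_ENV_ADMIN_SERVER_HOST, "localhost");

        // the handler presumably targets these hosts when shelling out to kadmin;
        // before this patch the equivalent values were read from krb5-conf
        System.out.println(kerberosEnv);
      }
    }
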
http://git-wip-us.apache.org/repos/asf/ambari/blob/8b4ef2b6/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/UpdateKerberosConfigsServerActionTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/UpdateKerberosConfigsServerActionTest.java b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/UpdateKerberosConfigsServerActionTest.java
index 6bb59c5..f902ba2 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/UpdateKerberosConfigsServerActionTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/UpdateKerberosConfigsServerActionTest.java
@@ -99,7 +99,7 @@ public class UpdateKerberosConfigsServerActionTest {
     kerberosActionDataFileBuilder.addRecord("c6403.ambari.apache.org", "HDFS", "DATANODE",
       "dn/_HOST@_REALM", "service", "hdfs-site/dfs.namenode.kerberos.principal",
       "/etc/security/keytabs/dn.service.keytab",
-      "hdfs", "r", "hadoop", "", "hdfs-site/dfs.namenode.keytab.file");
+      "hdfs", "r", "hadoop", "", "hdfs-site/dfs.namenode.keytab.file", "false");
 
     kerberosActionDataFileBuilder.close();
     File hostDirectory = new File(dataDir, "c6403.ambari.apache.org");

http://git-wip-us.apache.org/repos/asf/ambari/blob/8b4ef2b6/ambari-server/src/test/python/stacks/2.2/KERBEROS/use_cases.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.2/KERBEROS/use_cases.py b/ambari-server/src/test/python/stacks/2.2/KERBEROS/use_cases.py
index ecf7853..181c16d 100644
--- a/ambari-server/src/test/python/stacks/2.2/KERBEROS/use_cases.py
+++ b/ambari-server/src/test/python/stacks/2.2/KERBEROS/use_cases.py
@@ -21,12 +21,12 @@ import json
 
 krb5_conf_template = \
   '[libdefaults]\n' \
-  '  renew_lifetime = {{libdefaults_renew_lifetime}}\n' \
-  '  forwardable = {{libdefaults_forwardable}}\n' \
+  '  renew_lifetime = 7d\n' \
+  '  forwardable = true\n' \
   '  realm = {{realm|upper()}}\n' \
-  '  ticket_lifetime = {{libdefaults_ticket_lifetime}}\n' \
-  '  dns_lookup_realm = {{libdefaults_dns_lookup_realm}}\n' \
-  '  dns_lookup_kdc = {{libdefaults_dns_lookup_kdc}}\n' \
+  '  ticket_lifetime = 24h\n' \
+  '  dns_lookup_realm = false\n' \
+  '  dns_lookup_kdc = false\n' \
   '\n' \
   '{% if domains %}\n' \
   '[domain_realm]\n' \
@@ -36,12 +36,10 @@ krb5_conf_template = \
   '{% endif %}\n' \
   '\n' \
   '[logging]\n' \
-  '  default = {{logging_default}}\n' \
-  '{#\n' \
-  ' # The following options are unused unless a managed KDC is installed\n' \
-  '  admin_server = {{logging_admin_server}}\n' \
-  'kdc = {{logging_admin_kdc}}\n' \
-  '#}\n' \
+  '  default = FILE:/var/log/krb5kdc.log\n' \
+  '  admin_server = FILE:/var/log/kadmind.log\n' \
+  '  kdc = FILE:/var/log/krb5kdc.log\n' \
+  '\n' \
   '[realms]\n' \
   '  {{realm}} = {\n' \
   '    admin_server = {{admin_server_host|default(kdc_host, True)}}\n' \
@@ -75,11 +73,11 @@ def get_manged_kdc_use_case():
 
   json_data['clusterHostInfo']['kdc_server_hosts'] = ['c6401.ambari.apache.org']
   json_data['configurations']['kerberos-env'] = {
-    'kdc_type': 'mit-kdc'
+    'kdc_type': 'mit-kdc',
+    'kdc_host': 'c6401.ambari.apache.org'
   }
   json_data['configurations']['krb5-conf'] = {
     'realm': 'MANAGED_REALM.COM',
-    'kdc_host': 'c6401.ambari.apache.org',
     'admin_principal': "admin/admin",
     'admin_password': "hadoop"
   }
@@ -94,6 +92,7 @@ def get_unmanged_kdc_use_case():
     json_data = json.load(f)
 
   json_data['configurations']['kerberos-env'] = {
+    'kdc_host': 'ad.oscorp_industries.com',
     'kdc_type': 'mit-kdc'
   }
   json_data['configurations']['krb5-conf'] = {
@@ -101,7 +100,6 @@ def get_unmanged_kdc_use_case():
     'conf_file': 'krb5_unmanaged.conf',
     'content': krb5_conf_template,
     'realm': 'OSCORPINDUSTRIES.COM',
-    'kdc_host': 'ad.oscorp_industries.com',
     'admin_principal': "admin/admin",
     'admin_password': "hadoop"
   }
@@ -125,12 +123,14 @@ def get_unmanged_krb5conf_use_case():
   json_data['configurations']['krb5-conf'] = {
     'realm': 'MANAGED_REALM.COM',
     'kdc_type': 'mit-kdc',
-    'kdc_host': 'c6401.ambari.apache.org',
     'admin_principal': "admin/admin",
     'admin_password': "hadoop",
     'manage_krb5_conf': "false"
   }
-  json_data['configurations']['kerberos-env'] = { 'encryption_types' : 'aes256-cts-hmac-sha1-96'}
+  json_data['configurations']['kerberos-env'] = {
+    'kdc_host': 'c6401.ambari.apache.org',
+    'encryption_types' : 'aes256-cts-hmac-sha1-96'
+  }
 
   return json_data
 
@@ -140,6 +140,7 @@ def get_unmanged_ad_use_case():
     json_data = json.load(f)
 
   json_data['configurations']['kerberos-env'] = {
+    'kdc_host': 'ad.oscorp_industries.com',
     'kdc_type': 'active-directory',
   }
   json_data['configurations']['krb5-conf'] = {
@@ -147,7 +148,6 @@ def get_unmanged_ad_use_case():
     'conf_file': 'krb5_ad.conf',
     'content': krb5_conf_template,
     'realm': 'OSCORPINDUSTRIES.COM',
-    'kdc_host': 'ad.oscorp_industries.com',
     'admin_principal': "admin/admin",
     'admin_password': "hadoop"
   }
@@ -173,12 +173,12 @@ def get_cross_realm_use_case():
 
   json_data['clusterHostInfo']['kdc_server_hosts'] = ['c6401.ambari.apache.org']
   json_data['configurations']['kerberos-env'] = {
+    'kdc_host': 'c6401.ambari.apache.org',
     'kdc_type': 'mit-kdc'
   }
   json_data['configurations']['krb5-conf'] = {
     'content': _krb5_conf_template,
     'realm': 'MANAGED_REALM.COM',
-    'kdc_host': 'c6401.ambari.apache.org',
     'admin_principal': "admin/admin",
     'admin_password': "hadoop"
   }

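Across all five use cases the pattern is identical: kdc_host moves out of krb5-conf into kerberos-env, and the template keeps resolving because of the admin_server_host|default(kdc_host, True) filter, where Jinja's boolean second argument makes empty strings fall back too, not just undefined variables. The same fallback spelled out as a hedged Java helper (names are illustrative):

    public class AdminServerFallbackSketch {
      // Mirrors admin_server_host|default(kdc_host, True): treat null and
      // empty values alike, falling back to the KDC host.
      static String adminServer(String adminServerHost, String kdcHost) {
        return (adminServerHost == null || adminServerHost.isEmpty()) ? kdcHost : adminServerHost;
      }

      public static void main(String[] args) {
        System.out.println(adminServer(null, "c6401.ambari.apache.org"));  // -> c6401.ambari.apache.org
        System.out.println(adminServer("", "c6401.ambari.apache.org"));    // -> c6401.ambari.apache.org
        System.out.println(adminServer("kadmin.example.org", "c6401.ambari.apache.org"));
      }
    }
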
http://git-wip-us.apache.org/repos/asf/ambari/blob/8b4ef2b6/ambari-server/src/test/python/stacks/2.2/configs/journalnode-upgrade-hdfs-secure.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.2/configs/journalnode-upgrade-hdfs-secure.json b/ambari-server/src/test/python/stacks/2.2/configs/journalnode-upgrade-hdfs-secure.json
index b4e3c59..a8b5476 100644
--- a/ambari-server/src/test/python/stacks/2.2/configs/journalnode-upgrade-hdfs-secure.json
+++ b/ambari-server/src/test/python/stacks/2.2/configs/journalnode-upgrade-hdfs-secure.json
@@ -165,6 +165,8 @@
         }, 
         "kerberos-env": {
             "kdc_type": "mit-kdc",
+            "kdc_host": "c6406.ambari.apache.org",
+            "admin_server_host": "c6406.ambari.apache.org",
             "ldap_url": "",
             "create_attributes_template": "\n{\n  \"objectClass\": [\"top\", \"person\", \"organizationalPerson\", \"user\"],\n  \"cn\": \"$principal_name\",\n  #if( $is_service )\n  \"servicePrincipalName\": \"$principal_name\",\n  #end\n  \"userPrincipalName\": \"$normalized_principal\",\n  \"unicodePwd\": \"$password\",\n  \"accountExpires\": \"0\",\n  \"userAccountControl\": \"66048\"\n}\n    ", 
             "container_dn": ""
@@ -997,23 +999,11 @@
             "hbase_principal_name": "hbase@EXAMPLE.COM"
         }, 
         "krb5-conf": {
-            "kdc_host": "c6406.ambari.apache.org", 
-            "admin_server_host": "c6406.ambari.apache.org", 
-            "realm": "EXAMPLE.COM", 
-            "libdefaults_forwardable": "true", 
-            "conf_dir": "/etc", 
-            "libdefaults_dns_lookup_kdc": "false", 
-            "logging_admin_server": "FILE:/var/log/kadmind.log", 
-            "libdefaults_default_tgs_enctypes": "aes256-cts-hmac-sha1-96 aes128-cts-hmac-sha1-96 des3-cbc-sha1 arcfour-hmac-md5 camellia256-cts-cmac camellia128-cts-cmac des-cbc-crc des-cbc-md5 des-cbc-md4",
-            "content": "\n[libdefaults]\n  renew_lifetime = {{libdefaults_renew_lifetime}}\n  forwardable = {{libdefaults_forwardable}}\n  default_realm = {{realm|upper()}}\n  ticket_lifetime = {{libdefaults_ticket_lifetime}}\n  dns_lookup_realm = {{libdefaults_dns_lookup_realm}}\n  dns_lookup_kdc = {{libdefaults_dns_lookup_kdc}}\n\n{% if domains %}\n[domain_realm]\n{% for domain in domains.split(',') %}\n  {{domain}} = {{realm|upper()}}\n{% endfor %}\n{% endif %}\n\n[logging]\n  default = {{logging_default}}\n{#\n# The following options are unused unless a managed KDC is installed\n  admin_server = {{logging_admin_server}}\n  kdc = {{logging_admin_kdc}}\n#}\n\n[realms]\n  {{realm}} = {\n    admin_server = {{admin_server_host|default(kdc_host, True)}}\n    kdc = {{kdc_host}}\n  }\n\n{# Append additional realm declarations below #}\n    ", 
-            "libdefaults_ticket_lifetime": "24h", 
-            "logging_kdc": "FILE:/var/log/krb5kdc.log", 
-            "domains": "", 
-            "logging_default": "FILE:/var/log/krb5libs.log", 
-            "libdefaults_dns_lookup_realm": "false",
-            "libdefaults_renew_lifetime": "7d", 
-            "libdefaults_default_tkt_enctypes": "aes256-cts-hmac-sha1-96 aes128-cts-hmac-sha1-96 des3-cbc-sha1 arcfour-hmac-md5 camellia256-cts-cmac camellia128-cts-cmac des-cbc-crc des-cbc-md5 des-cbc-md4"
-        }, 
+            "realm": "EXAMPLE.COM",
+            "conf_dir": "/etc",
+            "content": "\n[libdefaults]\n  renew_lifetime = 7d\n  forwardable = true\n  default_realm = {{realm|upper()}}\n  ticket_lifetime = 24h\n  dns_lookup_realm = false\n  dns_lookup_kdc = false\n\n{% if domains %}\n[domain_realm]\n{% for domain in domains.split(',') %}\n  {{domain}} = {{realm|upper()}}\n{% endfor %}\n{% endif %}\n\n[logging]\n  default = FILE:/var/log/krb5kdc.log\n  admin_server = FILE:/var/log/kadmind.log\n  kdc = FILE:/var/log/krb5kdc.log\n\n[realms]\n  {{realm}} = {\n    admin_server = {{admin_server_host|default(kdc_host, True)}}\n    kdc = {{kdc_host}}\n  }\n\n{# Append additional realm declarations below #}\n    ",
+            "domains": ""
+        },
         "yarn-log4j": {
             "content": "\n#Relative to Yarn Log Dir Prefix\nyarn.log.dir=.\n#\n# Job Summary Appender\n#\n# Use following logger to send summary to separate file defined by\n# hadoop.mapreduce.jobsummary.log.file rolled daily:\n# hadoop.mapreduce.jobsummary.logger=INFO,JSA\n#\nhadoop.mapreduce.jobsummary.logger=${hadoop.root.logger}\nhadoop.mapreduce.jobsummary.log.file=hadoop-mapreduce.jobsummary.log\nlog4j.appender.JSA=org.apache.log4j.DailyRollingFileAppender\n# Set the ResourceManager summary log filename\nyarn.server.resourcemanager.appsummary.log.file=hadoop-mapreduce.jobsummary.log\n# Set the ResourceManager summary log level and appender\nyarn.server.resourcemanager.appsummary.logger=${hadoop.root.logger}\n#yarn.server.resourcemanager.appsummary.logger=INFO,RMSUMMARY\n\n# To enable AppSummaryLogging for the RM,\n# set yarn.server.resourcemanager.appsummary.logger to\n# LEVEL,RMSUMMARY in hadoop-env.sh\n\n# Appender for ResourceManager Application Summary Log\n# Requires the 
 following properties to be set\n#    - hadoop.log.dir (Hadoop Log directory)\n#    - yarn.server.resourcemanager.appsummary.log.file (resource manager app summary log filename)\n#    - yarn.server.resourcemanager.appsummary.logger (resource manager app summary log level and appender)\nlog4j.appender.RMSUMMARY=org.apache.log4j.RollingFileAppender\nlog4j.appender.RMSUMMARY.File=${yarn.log.dir}/${yarn.server.resourcemanager.appsummary.log.file}\nlog4j.appender.RMSUMMARY.MaxFileSize=256MB\nlog4j.appender.RMSUMMARY.MaxBackupIndex=20\nlog4j.appender.RMSUMMARY.layout=org.apache.log4j.PatternLayout\nlog4j.appender.RMSUMMARY.layout.ConversionPattern=%d{ISO8601} %p %c{2}: %m%n\nlog4j.appender.JSA.layout=org.apache.log4j.PatternLayout\nlog4j.appender.JSA.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n\nlog4j.appender.JSA.DatePattern=.yyyy-MM-dd\nlog4j.appender.JSA.layout=org.apache.log4j.PatternLayout\nlog4j.logger.org.apache.hadoop.yarn.server.resourcemanager.RMAppManager$Applic
 ationSummary=${yarn.server.resourcemanager.appsummary.logger}\nlog4j.additivity.org.apache.hadoop.yarn.server.resourcemanager.RMAppManager$ApplicationSummary=false"
         }, 

http://git-wip-us.apache.org/repos/asf/ambari/blob/8b4ef2b6/ambari-server/src/test/python/stacks/2.2/configs/journalnode-upgrade.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.2/configs/journalnode-upgrade.json b/ambari-server/src/test/python/stacks/2.2/configs/journalnode-upgrade.json
index 96d31b0..0804af1 100644
--- a/ambari-server/src/test/python/stacks/2.2/configs/journalnode-upgrade.json
+++ b/ambari-server/src/test/python/stacks/2.2/configs/journalnode-upgrade.json
@@ -165,6 +165,8 @@
         }, 
         "kerberos-env": {
             "kdc_type": "mit-kdc",
+            "kdc_host": "c6406.ambari.apache.org",
+            "admin_server_host": "c6406.ambari.apache.org",
             "ldap_url": "",
             "create_attributes_template": "\n{\n  \"objectClass\": [\"top\", \"person\", \"organizationalPerson\", \"user\"],\n  \"cn\": \"$principal_name\",\n  #if( $is_service )\n  \"servicePrincipalName\": \"$principal_name\",\n  #end\n  \"userPrincipalName\": \"$normalized_principal\",\n  \"unicodePwd\": \"$password\",\n  \"accountExpires\": \"0\",\n  \"userAccountControl\": \"66048\"\n}\n    ", 
             "container_dn": ""
@@ -997,23 +999,11 @@
             "hbase_principal_name": "hbase@EXAMPLE.COM"
         }, 
         "krb5-conf": {
-            "kdc_host": "c6406.ambari.apache.org", 
-            "admin_server_host": "c6406.ambari.apache.org", 
-            "realm": "EXAMPLE.COM", 
-            "libdefaults_forwardable": "true", 
-            "conf_dir": "/etc", 
-            "libdefaults_dns_lookup_kdc": "false", 
-            "logging_admin_server": "FILE:/var/log/kadmind.log", 
-            "libdefaults_default_tgs_enctypes": "aes256-cts-hmac-sha1-96 aes128-cts-hmac-sha1-96 des3-cbc-sha1 arcfour-hmac-md5 camellia256-cts-cmac camellia128-cts-cmac des-cbc-crc des-cbc-md5 des-cbc-md4",
-            "content": "\n[libdefaults]\n  renew_lifetime = {{libdefaults_renew_lifetime}}\n  forwardable = {{libdefaults_forwardable}}\n  default_realm = {{realm|upper()}}\n  ticket_lifetime = {{libdefaults_ticket_lifetime}}\n  dns_lookup_realm = {{libdefaults_dns_lookup_realm}}\n  dns_lookup_kdc = {{libdefaults_dns_lookup_kdc}}\n\n{% if domains %}\n[domain_realm]\n{% for domain in domains.split(',') %}\n  {{domain}} = {{realm|upper()}}\n{% endfor %}\n{% endif %}\n\n[logging]\n  default = {{logging_default}}\n{#\n# The following options are unused unless a managed KDC is installed\n  admin_server = {{logging_admin_server}}\n  kdc = {{logging_admin_kdc}}\n#}\n\n[realms]\n  {{realm}} = {\n    admin_server = {{admin_server_host|default(kdc_host, True)}}\n    kdc = {{kdc_host}}\n  }\n\n{# Append additional realm declarations below #}\n    ", 
-            "libdefaults_ticket_lifetime": "24h", 
-            "logging_kdc": "FILE:/var/log/krb5kdc.log", 
-            "domains": "", 
-            "logging_default": "FILE:/var/log/krb5libs.log", 
-            "libdefaults_dns_lookup_realm": "false",
-            "libdefaults_renew_lifetime": "7d", 
-            "libdefaults_default_tkt_enctypes": "aes256-cts-hmac-sha1-96 aes128-cts-hmac-sha1-96 des3-cbc-sha1 arcfour-hmac-md5 camellia256-cts-cmac camellia128-cts-cmac des-cbc-crc des-cbc-md5 des-cbc-md4"
-        }, 
+            "realm": "EXAMPLE.COM",
+            "conf_dir": "/etc",
+            "content": "\n[libdefaults]\n  renew_lifetime = 7d\n  forwardable = true\n  default_realm = {{realm|upper()}}\n  ticket_lifetime = 24h\n  dns_lookup_realm = false\n  dns_lookup_kdc = false\n\n{% if domains %}\n[domain_realm]\n{% for domain in domains.split(',') %}\n  {{domain}} = {{realm|upper()}}\n{% endfor %}\n{% endif %}\n\n[logging]\n  default = FILE:/var/log/krb5kdc.log\n  admin_server = FILE:/var/log/kadmind.log\n  kdc = FILE:/var/log/krb5kdc.log\n\n[realms]\n  {{realm}} = {\n    admin_server = {{admin_server_host|default(kdc_host, True)}}\n    kdc = {{kdc_host}}\n  }\n\n{# Append additional realm declarations below #}\n    ",
+            "domains": ""
+        },
         "yarn-log4j": {
             "content": "\n#Relative to Yarn Log Dir Prefix\nyarn.log.dir=.\n#\n# Job Summary Appender\n#\n# Use following logger to send summary to separate file defined by\n# hadoop.mapreduce.jobsummary.log.file rolled daily:\n# hadoop.mapreduce.jobsummary.logger=INFO,JSA\n#\nhadoop.mapreduce.jobsummary.logger=${hadoop.root.logger}\nhadoop.mapreduce.jobsummary.log.file=hadoop-mapreduce.jobsummary.log\nlog4j.appender.JSA=org.apache.log4j.DailyRollingFileAppender\n# Set the ResourceManager summary log filename\nyarn.server.resourcemanager.appsummary.log.file=hadoop-mapreduce.jobsummary.log\n# Set the ResourceManager summary log level and appender\nyarn.server.resourcemanager.appsummary.logger=${hadoop.root.logger}\n#yarn.server.resourcemanager.appsummary.logger=INFO,RMSUMMARY\n\n# To enable AppSummaryLogging for the RM,\n# set yarn.server.resourcemanager.appsummary.logger to\n# LEVEL,RMSUMMARY in hadoop-env.sh\n\n# Appender for ResourceManager Application Summary Log\n# Requires the 
 following properties to be set\n#    - hadoop.log.dir (Hadoop Log directory)\n#    - yarn.server.resourcemanager.appsummary.log.file (resource manager app summary log filename)\n#    - yarn.server.resourcemanager.appsummary.logger (resource manager app summary log level and appender)\nlog4j.appender.RMSUMMARY=org.apache.log4j.RollingFileAppender\nlog4j.appender.RMSUMMARY.File=${yarn.log.dir}/${yarn.server.resourcemanager.appsummary.log.file}\nlog4j.appender.RMSUMMARY.MaxFileSize=256MB\nlog4j.appender.RMSUMMARY.MaxBackupIndex=20\nlog4j.appender.RMSUMMARY.layout=org.apache.log4j.PatternLayout\nlog4j.appender.RMSUMMARY.layout.ConversionPattern=%d{ISO8601} %p %c{2}: %m%n\nlog4j.appender.JSA.layout=org.apache.log4j.PatternLayout\nlog4j.appender.JSA.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n\nlog4j.appender.JSA.DatePattern=.yyyy-MM-dd\nlog4j.appender.JSA.layout=org.apache.log4j.PatternLayout\nlog4j.logger.org.apache.hadoop.yarn.server.resourcemanager.RMAppManager$Applic
 ationSummary=${yarn.server.resourcemanager.appsummary.logger}\nlog4j.additivity.org.apache.hadoop.yarn.server.resourcemanager.RMAppManager$ApplicationSummary=false"
         }, 

http://git-wip-us.apache.org/repos/asf/ambari/blob/8b4ef2b6/ambari-server/src/test/python/stacks/2.2/configs/pig-service-check-secure.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.2/configs/pig-service-check-secure.json b/ambari-server/src/test/python/stacks/2.2/configs/pig-service-check-secure.json
index d23c908..db57cc4 100644
--- a/ambari-server/src/test/python/stacks/2.2/configs/pig-service-check-secure.json
+++ b/ambari-server/src/test/python/stacks/2.2/configs/pig-service-check-secure.json
@@ -143,8 +143,10 @@
             "container_dn": "", 
             "ldap_url": "", 
             "encryption_types": "aes des3-cbc-sha1 rc4 des-cbc-md5", 
-            "kdc_type": "mit-kdc"
-        }, 
+            "kdc_type": "mit-kdc",
+            "kdc_host": "c6401.ambari.apache.org",
+            "admin_server_host": "c6401.ambari.apache.org"
+        },
         "tez-site": {
             "tez.task.get-task.sleep.interval-ms.max": "200", 
             "tez.task.max-events-per-heartbeat": "500", 
@@ -280,23 +282,11 @@
             "mapred_log_dir_prefix": "/var/log/hadoop-mapreduce"
         }, 
         "krb5-conf": {
-            "kdc_host": "c6401.ambari.apache.org", 
-            "admin_server_host": "c6401.ambari.apache.org", 
-            "libdefaults_forwardable": "true", 
-            "conf_dir": "/etc", 
-            "libdefaults_dns_lookup_kdc": "false", 
-            "logging_admin_server": "FILE:/var/log/kadmind.log", 
-            "libdefaults_default_tgs_enctypes": "", 
-            "content": "\n[libdefaults]\n  renew_lifetime = {{libdefaults_renew_lifetime}}\n  forwardable = {{libdefaults_forwardable}}\n  default_realm = {{realm|upper()}}\n  ticket_lifetime = {{libdefaults_ticket_lifetime}}\n  dns_lookup_realm = {{libdefaults_dns_lookup_realm}}\n  dns_lookup_kdc = {{libdefaults_dns_lookup_kdc}}\n  {% if libdefaults_default_tgs_enctypes %}\n  default_tgs_enctypes = {{libdefaults_default_tgs_enctypes}}\n  {% endif %}\n  {% if libdefaults_default_tkt_enctypes %}\n  default_tkt_enctypes = {{libdefaults_default_tkt_enctypes}}\n  {% endif %}\n\n{% if domains %}\n[domain_realm]\n{% for domain in domains.split(',') %}\n  {{domain}} = {{realm|upper()}}\n{% endfor %}\n{% endif %}\n\n[logging]\n  default = {{logging_default}}\n{#\n# The following options are unused unless a managed KDC is installed\n  admin_server = {{logging_admin_server}}\n  kdc = {{logging_admin_kdc}}\n#}\n\n[realms]\n  {{realm}} = {\n    admin_server = {{admin_server_host|default(kdc_hos
 t, True)}}\n    kdc = {{kdc_host}}\n  }\n\n{# Append additional realm declarations below #}\n    ", 
-            "libdefaults_ticket_lifetime": "24h", 
-            "logging_kdc": "FILE:/var/log/krb5kdc.log", 
-            "domains": "", 
-            "manage_krb5_conf": "true", 
-            "logging_default": "FILE:/var/log/krb5libs.log", 
-            "libdefaults_dns_lookup_realm": "false", 
-            "libdefaults_renew_lifetime": "7d", 
-            "libdefaults_default_tkt_enctypes": ""
-        }, 
+            "conf_dir": "/etc",
+            "content": "\n[libdefaults]\n  renew_lifetime = 7d\n  forwardable = true\n  default_realm = {{realm|upper()}}\n  ticket_lifetime = 24h\n  dns_lookup_realm = false\n  dns_lookup_kdc = false\n  #default_tgs_enctypes = {{encryption_types}}\n  #default_tkt_enctypes = {{encryption_types}}\n\n{% if domains %}\n[domain_realm]\n{% for domain in domains.split(',') %}\n  {{domain}} = {{realm|upper()}}\n{% endfor %}\n{% endif %}\n\n[logging]\n  default = FILE:/var/log/krb5kdc.log\n  admin_server = FILE:/var/log/kadmind.log\n  kdc = FILE:/var/log/krb5kdc.log\n\n[realms]\n  {{realm}} = {\n    admin_server = {{admin_server_host|default(kdc_host, True)}}\n    kdc = {{kdc_host}}\n  }\n\n{# Append additional realm declarations below #}\n    ",
+            "domains": "",
+            "manage_krb5_conf": "true"
+        },
         "pig-properties": {
             "content": "\n# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements.  See the NOTICE file\n# distributed with this work for additional information\n# regarding copyright ownership.  The ASF licenses this file\n# to you under the Apache License, Version 2.0 (the\n# \"License\"); you may not use this file except in compliance\n# with the License.  You may obtain a copy of the License at\n#\n#   http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing,\n# software distributed under the License is distributed on an\n# \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n# KIND, either express or implied.  See the License for the\n# specific language governing permissions and limitations\n# under the License.\n\n# Pig default configuration file. All values can be overwritten by pig.properties and command line arguments.\n# see bin/pig -help\n\n# brief logging (no timestamps
 )\nbrief=false\n\n# debug level, INFO is default\ndebug=INFO\n\n# verbose print all log messages to screen (default to print only INFO and above to screen)\nverbose=false\n\n# exectype local|mapreduce, mapreduce is default\nexectype=mapreduce\n\n# Enable insertion of information about script into hadoop job conf \npig.script.info.enabled=true\n\n# Do not spill temp files smaller than this size (bytes)\npig.spill.size.threshold=5000000\n\n# EXPERIMENT: Activate garbage collection when spilling a file bigger than this size (bytes)\n# This should help reduce the number of files being spilled.\npig.spill.gc.activation.size=40000000\n\n# the following two parameters are to help estimate the reducer number\npig.exec.reducers.bytes.per.reducer=1000000000\npig.exec.reducers.max=999\n\n# Temporary location to store the intermediate data.\npig.temp.dir=/tmp/\n\n# Threshold for merging FRJoin fragment files\npig.files.concatenation.threshold=100\npig.optimistic.files.concatenation=false;\n\npi
 g.disable.counter=false\n\n# Avoid pig failures when multiple jobs write to the same location\npig.location.check.strict=false\n\nhcat.bin=/usr/bin/hcat"
         }, 

http://git-wip-us.apache.org/repos/asf/ambari/blob/8b4ef2b6/ambari-server/src/test/python/stacks/2.2/configs/ranger-admin-upgrade.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.2/configs/ranger-admin-upgrade.json b/ambari-server/src/test/python/stacks/2.2/configs/ranger-admin-upgrade.json
index 19ef81f..a236b36 100644
--- a/ambari-server/src/test/python/stacks/2.2/configs/ranger-admin-upgrade.json
+++ b/ambari-server/src/test/python/stacks/2.2/configs/ranger-admin-upgrade.json
@@ -153,23 +153,11 @@
             "mapreduce.admin.map.child.java.opts": "-server -XX:NewRatio=8 -Djava.net.preferIPv4Stack=true -Dhdp.version=${hdp.version}"
         }, 
         "krb5-conf": {
-            "kdc_host": "c6407.ambari.apache.org", 
-            "admin_server_host": "c6407.ambari.apache.org", 
-            "libdefaults_forwardable": "true", 
-            "conf_dir": "/etc", 
-            "libdefaults_dns_lookup_kdc": "false", 
-            "logging_admin_server": "FILE:/var/log/kadmind.log", 
-            "libdefaults_default_tgs_enctypes": "", 
-            "content": "\n[libdefaults]\n  renew_lifetime = {{libdefaults_renew_lifetime}}\n  forwardable = {{libdefaults_forwardable}}\n  default_realm = {{realm|upper()}}\n  ticket_lifetime = {{libdefaults_ticket_lifetime}}\n  dns_lookup_realm = {{libdefaults_dns_lookup_realm}}\n  dns_lookup_kdc = {{libdefaults_dns_lookup_kdc}}\n  {% if libdefaults_default_tgs_enctypes %}\n  default_tgs_enctypes = {{libdefaults_default_tgs_enctypes}}\n  {% endif %}\n  {% if libdefaults_default_tkt_enctypes %}\n  default_tkt_enctypes = {{libdefaults_default_tkt_enctypes}}\n  {% endif %}\n\n{% if domains %}\n[domain_realm]\n{% for domain in domains.split(',') %}\n  {{domain}} = {{realm|upper()}}\n{% endfor %}\n{% endif %}\n\n[logging]\n  default = {{logging_default}}\n{#\n# The following options are unused unless a managed KDC is installed\n  admin_server = {{logging_admin_server}}\n  kdc = {{logging_admin_kdc}}\n#}\n\n[realms]\n  {{realm}} = {\n    admin_server = {{admin_server_host|default(kdc_hos
 t, True)}}\n    kdc = {{kdc_host}}\n  }\n\n{# Append additional realm declarations below #}\n    ", 
-            "libdefaults_ticket_lifetime": "24h", 
-            "logging_kdc": "FILE:/var/log/krb5kdc.log", 
-            "domains": "", 
-            "manage_krb5_conf": "true", 
-            "logging_default": "FILE:/var/log/krb5libs.log", 
-            "libdefaults_dns_lookup_realm": "false", 
-            "libdefaults_renew_lifetime": "7d", 
-            "libdefaults_default_tkt_enctypes": ""
-        }, 
+            "conf_dir": "/etc",
+            "content": "\n[libdefaults]\n  renew_lifetime = 7d\n  forwardable = true\n  default_realm = {{realm|upper()}}\n  ticket_lifetime = 24h\n  dns_lookup_realm = false\n  dns_lookup_kdc = false\n  #default_tgs_enctypes = {{encryption_types}}\n  #default_tkt_enctypes = {{encryption_types}}\n\n{% if domains %}\n[domain_realm]\n{% for domain in domains.split(',') %}\n  {{domain}} = {{realm|upper()}}\n{% endfor %}\n{% endif %}\n\n[logging]\n  default = FILE:/var/log/krb5kdc.log\n  admin_server = FILE:/var/log/kadmind.log\n  kdc = FILE:/var/log/krb5kdc.log\n\n[realms]\n  {{realm}} = {\n    admin_server = {{admin_server_host|default(kdc_host, True)}}\n    kdc = {{kdc_host}}\n  }\n\n{# Append additional realm declarations below #}\n    ",
+            "domains": "",
+            "manage_krb5_conf": "true"
+        },
         "kafka-log4j": {
             "content": "\n#\n#\n# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements.  See the NOTICE file\n# distributed with this work for additional information\n# regarding copyright ownership.  The ASF licenses this file\n# to you under the Apache License, Version 2.0 (the\n# \"License\"); you may not use this file except in compliance\n# with the License.  You may obtain a copy of the License at\n#\n#   http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing,\n# software distributed under the License is distributed on an\n# \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n# KIND, either express or implied.  See the License for the\n# specific language governing permissions and limitations\n# under the License.\n#\n#\n#\nkafka.logs.dir=logs\n\nlog4j.rootLogger=INFO, stdout\n\nlog4j.appender.stdout=org.apache.log4j.ConsoleAppender\nlog4j.appender.stdout.layout=org.apache
 .log4j.PatternLayout\nlog4j.appender.stdout.layout.ConversionPattern=[%d] %p %m (%c)%n\n\nlog4j.appender.kafkaAppender=org.apache.log4j.DailyRollingFileAppender\nlog4j.appender.kafkaAppender.DatePattern='.'yyyy-MM-dd-HH\nlog4j.appender.kafkaAppender.File=${kafka.logs.dir}/server.log\nlog4j.appender.kafkaAppender.layout=org.apache.log4j.PatternLayout\nlog4j.appender.kafkaAppender.layout.ConversionPattern=[%d] %p %m (%c)%n\n\nlog4j.appender.stateChangeAppender=org.apache.log4j.DailyRollingFileAppender\nlog4j.appender.stateChangeAppender.DatePattern='.'yyyy-MM-dd-HH\nlog4j.appender.stateChangeAppender.File=${kafka.logs.dir}/state-change.log\nlog4j.appender.stateChangeAppender.layout=org.apache.log4j.PatternLayout\nlog4j.appender.stateChangeAppender.layout.ConversionPattern=[%d] %p %m (%c)%n\n\nlog4j.appender.requestAppender=org.apache.log4j.DailyRollingFileAppender\nlog4j.appender.requestAppender.DatePattern='.'yyyy-MM-dd-HH\nlog4j.appender.requestAppender.File=${kafka.logs.dir}/kafka-
 request.log\nlog4j.appender.requestAppender.layout=org.apache.log4j.PatternLayout\nlog4j.appender.requestAppender.layout.ConversionPattern=[%d] %p %m (%c)%n\n\nlog4j.appender.cleanerAppender=org.apache.log4j.DailyRollingFileAppender\nlog4j.appender.cleanerAppender.DatePattern='.'yyyy-MM-dd-HH\nlog4j.appender.cleanerAppender.File=${kafka.logs.dir}/log-cleaner.log\nlog4j.appender.cleanerAppender.layout=org.apache.log4j.PatternLayout\nlog4j.appender.cleanerAppender.layout.ConversionPattern=[%d] %p %m (%c)%n\n\nlog4j.appender.controllerAppender=org.apache.log4j.DailyRollingFileAppender\nlog4j.appender.controllerAppender.DatePattern='.'yyyy-MM-dd-HH\nlog4j.appender.controllerAppender.File=${kafka.logs.dir}/controller.log\nlog4j.appender.controllerAppender.layout=org.apache.log4j.PatternLayout\nlog4j.appender.controllerAppender.layout.ConversionPattern=[%d] %p %m (%c)%n\n\n# Turn on all our debugging info\n#log4j.logger.kafka.producer.async.DefaultEventHandler=DEBUG, kafkaAppender\n#log4j
 .logger.kafka.client.ClientUtils=DEBUG, kafkaAppender\n#log4j.logger.kafka.perf=DEBUG, kafkaAppender\n#log4j.logger.kafka.perf.ProducerPerformance$ProducerThread=DEBUG, kafkaAppender\n#log4j.logger.org.I0Itec.zkclient.ZkClient=DEBUG\nlog4j.logger.kafka=INFO, kafkaAppender\nlog4j.logger.kafka.network.RequestChannel$=WARN, requestAppender\nlog4j.additivity.kafka.network.RequestChannel$=false\n\n#log4j.logger.kafka.network.Processor=TRACE, requestAppender\n#log4j.logger.kafka.server.KafkaApis=TRACE, requestAppender\n#log4j.additivity.kafka.server.KafkaApis=false\nlog4j.logger.kafka.request.logger=WARN, requestAppender\nlog4j.additivity.kafka.request.logger=false\n\nlog4j.logger.kafka.controller=TRACE, controllerAppender\nlog4j.additivity.kafka.controller=false\n\nlog4j.logger.kafka.log.LogCleaner=INFO, cleanerAppender\nlog4j.additivity.kafka.log.LogCleaner=false\n\nlog4j.logger.state.change.logger=TRACE, stateChangeAppender\nlog4j.additivity.state.change.logger=false"
         }, 
@@ -204,7 +192,9 @@
             "realm": "EXAMPLE.COM", 
             "container_dn": "", 
             "ldap_url": "", 
-            "encryption_types": "aes des3-cbc-sha1 rc4 des-cbc-md5", 
+            "encryption_types": "aes des3-cbc-sha1 rc4 des-cbc-md5",
+            "kdc_host": "c6407.ambari.apache.org",
+            "admin_server_host": "c6407.ambari.apache.org",
             "kdc_type": "mit-kdc"
         }, 
         "ams-hbase-security-site": {

http://git-wip-us.apache.org/repos/asf/ambari/blob/8b4ef2b6/ambari-server/src/test/python/stacks/2.2/configs/ranger-usersync-upgrade.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.2/configs/ranger-usersync-upgrade.json b/ambari-server/src/test/python/stacks/2.2/configs/ranger-usersync-upgrade.json
index f72be07..9ff52c8 100644
--- a/ambari-server/src/test/python/stacks/2.2/configs/ranger-usersync-upgrade.json
+++ b/ambari-server/src/test/python/stacks/2.2/configs/ranger-usersync-upgrade.json
@@ -153,23 +153,11 @@
             "mapreduce.admin.map.child.java.opts": "-server -XX:NewRatio=8 -Djava.net.preferIPv4Stack=true -Dhdp.version=${hdp.version}"
         }, 
         "krb5-conf": {
-            "kdc_host": "c6407.ambari.apache.org", 
-            "admin_server_host": "c6407.ambari.apache.org", 
-            "libdefaults_forwardable": "true", 
-            "conf_dir": "/etc", 
-            "libdefaults_dns_lookup_kdc": "false", 
-            "logging_admin_server": "FILE:/var/log/kadmind.log", 
-            "libdefaults_default_tgs_enctypes": "", 
-            "content": "\n[libdefaults]\n  renew_lifetime = {{libdefaults_renew_lifetime}}\n  forwardable = {{libdefaults_forwardable}}\n  default_realm = {{realm|upper()}}\n  ticket_lifetime = {{libdefaults_ticket_lifetime}}\n  dns_lookup_realm = {{libdefaults_dns_lookup_realm}}\n  dns_lookup_kdc = {{libdefaults_dns_lookup_kdc}}\n  {% if libdefaults_default_tgs_enctypes %}\n  default_tgs_enctypes = {{libdefaults_default_tgs_enctypes}}\n  {% endif %}\n  {% if libdefaults_default_tkt_enctypes %}\n  default_tkt_enctypes = {{libdefaults_default_tkt_enctypes}}\n  {% endif %}\n\n{% if domains %}\n[domain_realm]\n{% for domain in domains.split(',') %}\n  {{domain}} = {{realm|upper()}}\n{% endfor %}\n{% endif %}\n\n[logging]\n  default = {{logging_default}}\n{#\n# The following options are unused unless a managed KDC is installed\n  admin_server = {{logging_admin_server}}\n  kdc = {{logging_admin_kdc}}\n#}\n\n[realms]\n  {{realm}} = {\n    admin_server = {{admin_server_host|default(kdc_hos
 t, True)}}\n    kdc = {{kdc_host}}\n  }\n\n{# Append additional realm declarations below #}\n    ", 
-            "libdefaults_ticket_lifetime": "24h", 
-            "logging_kdc": "FILE:/var/log/krb5kdc.log", 
-            "domains": "", 
-            "manage_krb5_conf": "true", 
-            "logging_default": "FILE:/var/log/krb5libs.log", 
-            "libdefaults_dns_lookup_realm": "false", 
-            "libdefaults_renew_lifetime": "7d", 
-            "libdefaults_default_tkt_enctypes": ""
-        }, 
+            "conf_dir": "/etc",
+            "content": "\n[libdefaults]\n  renew_lifetime = 7d\n  forwardable = true\n  default_realm = {{realm|upper()}}\n  ticket_lifetime = 24h\n  dns_lookup_realm = false\n  dns_lookup_kdc = false\n  #default_tgs_enctypes = {{encryption_types}}\n  #default_tkt_enctypes = {{encryption_types}}\n\n{% if domains %}\n[domain_realm]\n{% for domain in domains.split(',') %}\n  {{domain}} = {{realm|upper()}}\n{% endfor %}\n{% endif %}\n\n[logging]\n  default = FILE:/var/log/krb5kdc.log\n  admin_server = FILE:/var/log/kadmind.log\n  kdc = FILE:/var/log/krb5kdc.log\n\n[realms]\n  {{realm}} = {\n    admin_server = {{admin_server_host|default(kdc_host, True)}}\n    kdc = {{kdc_host}}\n  }\n\n{# Append additional realm declarations below #}\n    ",
+            "domains": "",
+            "manage_krb5_conf": "true"
+        },
         "kafka-log4j": {
             "content": "\n#\n#\n# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements.  See the NOTICE file\n# distributed with this work for additional information\n# regarding copyright ownership.  The ASF licenses this file\n# to you under the Apache License, Version 2.0 (the\n# \"License\"); you may not use this file except in compliance\n# with the License.  You may obtain a copy of the License at\n#\n#   http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing,\n# software distributed under the License is distributed on an\n# \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n# KIND, either express or implied.  See the License for the\n# specific language governing permissions and limitations\n# under the License.\n#\n#\n#\nkafka.logs.dir=logs\n\nlog4j.rootLogger=INFO, stdout\n\nlog4j.appender.stdout=org.apache.log4j.ConsoleAppender\nlog4j.appender.stdout.layout=org.apache
 .log4j.PatternLayout\nlog4j.appender.stdout.layout.ConversionPattern=[%d] %p %m (%c)%n\n\nlog4j.appender.kafkaAppender=org.apache.log4j.DailyRollingFileAppender\nlog4j.appender.kafkaAppender.DatePattern='.'yyyy-MM-dd-HH\nlog4j.appender.kafkaAppender.File=${kafka.logs.dir}/server.log\nlog4j.appender.kafkaAppender.layout=org.apache.log4j.PatternLayout\nlog4j.appender.kafkaAppender.layout.ConversionPattern=[%d] %p %m (%c)%n\n\nlog4j.appender.stateChangeAppender=org.apache.log4j.DailyRollingFileAppender\nlog4j.appender.stateChangeAppender.DatePattern='.'yyyy-MM-dd-HH\nlog4j.appender.stateChangeAppender.File=${kafka.logs.dir}/state-change.log\nlog4j.appender.stateChangeAppender.layout=org.apache.log4j.PatternLayout\nlog4j.appender.stateChangeAppender.layout.ConversionPattern=[%d] %p %m (%c)%n\n\nlog4j.appender.requestAppender=org.apache.log4j.DailyRollingFileAppender\nlog4j.appender.requestAppender.DatePattern='.'yyyy-MM-dd-HH\nlog4j.appender.requestAppender.File=${kafka.logs.dir}/kafka-
 request.log\nlog4j.appender.requestAppender.layout=org.apache.log4j.PatternLayout\nlog4j.appender.requestAppender.layout.ConversionPattern=[%d] %p %m (%c)%n\n\nlog4j.appender.cleanerAppender=org.apache.log4j.DailyRollingFileAppender\nlog4j.appender.cleanerAppender.DatePattern='.'yyyy-MM-dd-HH\nlog4j.appender.cleanerAppender.File=${kafka.logs.dir}/log-cleaner.log\nlog4j.appender.cleanerAppender.layout=org.apache.log4j.PatternLayout\nlog4j.appender.cleanerAppender.layout.ConversionPattern=[%d] %p %m (%c)%n\n\nlog4j.appender.controllerAppender=org.apache.log4j.DailyRollingFileAppender\nlog4j.appender.controllerAppender.DatePattern='.'yyyy-MM-dd-HH\nlog4j.appender.controllerAppender.File=${kafka.logs.dir}/controller.log\nlog4j.appender.controllerAppender.layout=org.apache.log4j.PatternLayout\nlog4j.appender.controllerAppender.layout.ConversionPattern=[%d] %p %m (%c)%n\n\n# Turn on all our debugging info\n#log4j.logger.kafka.producer.async.DefaultEventHandler=DEBUG, kafkaAppender\n#log4j
 .logger.kafka.client.ClientUtils=DEBUG, kafkaAppender\n#log4j.logger.kafka.perf=DEBUG, kafkaAppender\n#log4j.logger.kafka.perf.ProducerPerformance$ProducerThread=DEBUG, kafkaAppender\n#log4j.logger.org.I0Itec.zkclient.ZkClient=DEBUG\nlog4j.logger.kafka=INFO, kafkaAppender\nlog4j.logger.kafka.network.RequestChannel$=WARN, requestAppender\nlog4j.additivity.kafka.network.RequestChannel$=false\n\n#log4j.logger.kafka.network.Processor=TRACE, requestAppender\n#log4j.logger.kafka.server.KafkaApis=TRACE, requestAppender\n#log4j.additivity.kafka.server.KafkaApis=false\nlog4j.logger.kafka.request.logger=WARN, requestAppender\nlog4j.additivity.kafka.request.logger=false\n\nlog4j.logger.kafka.controller=TRACE, controllerAppender\nlog4j.additivity.kafka.controller=false\n\nlog4j.logger.kafka.log.LogCleaner=INFO, cleanerAppender\nlog4j.additivity.kafka.log.LogCleaner=false\n\nlog4j.logger.state.change.logger=TRACE, stateChangeAppender\nlog4j.additivity.state.change.logger=false"
         }, 
@@ -200,8 +188,10 @@
             "content": "\n# Set everything to be logged to the console\nlog4j.rootCategory=INFO, console\nlog4j.appender.console=org.apache.log4j.ConsoleAppender\nlog4j.appender.console.target=System.err\nlog4j.appender.console.layout=org.apache.log4j.PatternLayout\nlog4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{1}: %m%n\n\n# Settings to quiet third party logs that are too verbose\nlog4j.logger.org.eclipse.jetty=WARN\nlog4j.logger.org.eclipse.jetty.util.component.AbstractLifeCycle=ERROR\nlog4j.logger.org.apache.spark.repl.SparkIMain$exprTyper=INFO\nlog4j.logger.org.apache.spark.repl.SparkILoop$SparkILoopInterpreter=INFO"
         }, 
         "kerberos-env": {
-            "create_attributes_template": "\n{\n  \"objectClass\": [\"top\", \"person\", \"organizationalPerson\", \"user\"],\n  \"cn\": \"$principal_name\",\n  #if( $is_service )\n  \"servicePrincipalName\": \"$principal_name\",\n  #end\n  \"userPrincipalName\": \"$normalized_principal\",\n  \"unicodePwd\": \"$password\",\n  \"accountExpires\": \"0\",\n  \"userAccountControl\": \"66048\"\n}\n    ", 
-            "realm": "EXAMPLE.COM", 
+            "create_attributes_template": "\n{\n  \"objectClass\": [\"top\", \"person\", \"organizationalPerson\", \"user\"],\n  \"cn\": \"$principal_name\",\n  #if( $is_service )\n  \"servicePrincipalName\": \"$principal_name\",\n  #end\n  \"userPrincipalName\": \"$normalized_principal\",\n  \"unicodePwd\": \"$password\",\n  \"accountExpires\": \"0\",\n  \"userAccountControl\": \"66048\"\n}\n    ",
+            "kdc_host": "c6407.ambari.apache.org",
+            "admin_server_host": "c6407.ambari.apache.org",
+            "realm": "EXAMPLE.COM",
             "container_dn": "", 
             "ldap_url": "", 
             "encryption_types": "aes des3-cbc-sha1 rc4 des-cbc-md5", 

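The hunk above reflects this patch's relocation of the KDC endpoint settings (kdc_host, admin_server_host) from krb5-conf into kerberos-env. As a minimal sketch of the lookup this implies (the helper class and method names are illustrative, not part of the patch), resolving the admin server host with a fallback to kdc_host mirrors the admin_server_host|default(kdc_host, True) expression in the krb5.conf template further below:

    import java.util.Map;

    public final class KdcEndpoints {
      // Mirrors the Jinja default() in the krb5.conf template: fall back to
      // kdc_host when admin_server_host is unset or blank.
      public static String adminServerHost(Map<String, String> kerberosEnv) {
        String adminHost = kerberosEnv.get("admin_server_host");
        if ((adminHost == null) || adminHost.trim().isEmpty()) {
          adminHost = kerberosEnv.get("kdc_host");
        }
        return adminHost;
      }
    }
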
http://git-wip-us.apache.org/repos/asf/ambari/blob/8b4ef2b6/ambari-web/app/assets/data/wizard/stack/hdp/version2.0.1/KERBEROS.json
----------------------------------------------------------------------
diff --git a/ambari-web/app/assets/data/wizard/stack/hdp/version2.0.1/KERBEROS.json b/ambari-web/app/assets/data/wizard/stack/hdp/version2.0.1/KERBEROS.json
index b34c7b4..585d30e 100644
--- a/ambari-web/app/assets/data/wizard/stack/hdp/version2.0.1/KERBEROS.json
+++ b/ambari-web/app/assets/data/wizard/stack/hdp/version2.0.1/KERBEROS.json
@@ -13,7 +13,7 @@
         "service_name" : "KERBEROS",
         "stack_name" : "HDP",
         "stack_version" : "2.2",
-        "type" : "krb5-conf.xml"
+        "type" : "kerberos-env.xml"
       }
     },
     {
@@ -79,7 +79,7 @@
         "property_description" : "The jinja template for the kdc.conf file",
         "property_name" : "content",
         "property_type" : [ ],
-        "property_value" : "\n      [kdcdefaults]\n        kdc_ports = {{kdcdefaults_kdc_ports}}\n        kdc_tcp_ports = {{kdcdefaults_kdc_tcp_ports}}\n\n      [realms]\n        {{realm}} = {\n          acl_file = {{kadm5_acl_path}}\n          dict_file = /usr/share/dict/words\n          admin_keytab = {{kadm5_acl_dir}}/kadm5.keytab\n          supported_enctypes = {{libdefaults_default_tgs_enctypes}}\n      }\n\n      {# Append additional realm declarations should be placed below #}\n    ",
+        "property_value" : "\n      [kdcdefaults]\n        kdc_ports = {{kdcdefaults_kdc_ports}}\n        kdc_tcp_ports = {{kdcdefaults_kdc_tcp_ports}}\n\n      [realms]\n        {{realm}} = {\n          acl_file = {{kadm5_acl_path}}\n          dict_file = /usr/share/dict/words\n          admin_keytab = {{kadm5_acl_dir}}/kadm5.keytab\n          supported_enctypes = {{encryption_types}}\n      }\n\n      {# Append additional realm declarations should be placed below #}\n    ",
         "service_name" : "KERBEROS",
         "stack_name" : "HDP",
         "stack_version" : "2.2",
@@ -93,7 +93,7 @@
         "property_description" : "The jinja template for the krb5.conf file",
         "property_name" : "content",
         "property_type" : [ ],
-        "property_value" : "\n[libdefaults]\n  renew_lifetime = {{libdefaults_renew_lifetime}}\n  forwardable = {{libdefaults_forwardable}}\n  default_realm = {{realm|upper()}}\n  ticket_lifetime = {{libdefaults_ticket_lifetime}}\n  dns_lookup_realm = {{libdefaults_dns_lookup_realm}}\n  dns_lookup_kdc = {{libdefaults_dns_lookup_kdc}}\n\n{% if domains %}\n[domain_realm]\n{% for domain in domains %}\n  {{domain}} = {{realm|upper()}}\n{% endfor %}\n{% endif %}\n\n[logging]\n  default = {{logging_default}}\n{#\n# The following options are unused unless a managed KDC is installed\n  admin_server = {{logging_admin_server}}\n  kdc = {{logging_admin_kdc}}\n#}\n\n[realms]\n  {{realm}} = {\n    admin_server = {{admin_server_host|default(kdc_host, True)}}\n    kdc = {{kdc_host}}\n  }\n\n{# Append additional realm declarations should be placed below #}\n    ",
+        "property_value" : "\n[libdefaults]\n  renew_lifetime = 7d\n  forwardable = true\n  default_realm = {{realm|upper()}}\n  ticket_lifetime = 24h\n  dns_lookup_realm = false\n  dns_lookup_kdc = false\n\n{% if domains %}\n[domain_realm]\n{% for domain in domains %}\n  {{domain}} = {{realm|upper()}}\n{% endfor %}\n{% endif %}\n\n[logging]\n  default = FILE:/var/log/krb5kdc.log\n  admin_server = FILE:/var/log/kadmind.log\n  kdc = FILE:/var/log/krb5kdc.log\n\n[realms]\n  {{realm}} = {\n    admin_server = {{admin_server_host|default(kdc_host, True)}}\n    kdc = {{kdc_host}}\n  }\n\n{# Append additional realm declarations should be placed below #}\n    ",
         "service_name" : "KERBEROS",
         "stack_name" : "HDP",
         "stack_version" : "2.2",
@@ -125,7 +125,7 @@
         "service_name" : "KERBEROS",
         "stack_name" : "HDP",
         "stack_version" : "2.2",
-        "type" : "krb5-conf.xml"
+        "type" : "kerberos-env.xml"
       }
     },
     {
@@ -171,146 +171,6 @@
       }
     },
     {
-      "href" : "http://c6403.ambari.apache.org:8080/api/v1/stacks/HDP/versions/2.2/services/KERBEROS/configurations/libdefaults_default_tgs_enctypes",
-      "StackConfigurations" : {
-        "final" : "false",
-        "property_description" : "\n      a space-delimited list of session key encryption types supported by the KDC or Active\n      Directory\n    ",
-        "property_name" : "libdefaults_default_tgs_enctypes",
-        "property_type" : [ ],
-        "property_value" : "\n      aes256-cts-hmac-sha1-96 aes128-cts-hmac-sha1-96 des3-cbc-sha1 arcfour-hmac-md5\n      camellia256-cts-cmac camellia128-cts-cmac des-cbc-crc des-cbc-md5 des-cbc-md4\n    ",
-        "service_name" : "KERBEROS",
-        "stack_name" : "HDP",
-        "stack_version" : "2.2",
-        "type" : "krb5-conf.xml"
-      }
-    },
-    {
-      "href" : "http://c6403.ambari.apache.org:8080/api/v1/stacks/HDP/versions/2.2/services/KERBEROS/configurations/libdefaults_default_tkt_enctypes",
-      "StackConfigurations" : {
-        "final" : "false",
-        "property_description" : "\n      a space-delimited list of session key encryption types supported by the KDC or Active\n      Directory\n    ",
-        "property_name" : "libdefaults_default_tkt_enctypes",
-        "property_type" : [ ],
-        "property_value" : "\n      aes256-cts-hmac-sha1-96 aes128-cts-hmac-sha1-96 des3-cbc-sha1 arcfour-hmac-md5\n      camellia256-cts-cmac camellia128-cts-cmac des-cbc-crc des-cbc-md5 des-cbc-md4\n    ",
-        "service_name" : "KERBEROS",
-        "stack_name" : "HDP",
-        "stack_version" : "2.2",
-        "type" : "krb5-conf.xml"
-      }
-    },
-    {
-      "href" : "http://c6403.ambari.apache.org:8080/api/v1/stacks/HDP/versions/2.2/services/KERBEROS/configurations/libdefaults_dns_lookup_kdc",
-      "StackConfigurations" : {
-        "final" : "false",
-        "property_description" : null,
-        "property_name" : "libdefaults_dns_lookup_kdc",
-        "property_type" : [ ],
-        "property_value" : "false",
-        "service_name" : "KERBEROS",
-        "stack_name" : "HDP",
-        "stack_version" : "2.2",
-        "type" : "krb5-conf.xml"
-      }
-    },
-    {
-      "href" : "http://c6403.ambari.apache.org:8080/api/v1/stacks/HDP/versions/2.2/services/KERBEROS/configurations/libdefaults_dns_lookup_realm",
-      "StackConfigurations" : {
-        "final" : "false",
-        "property_description" : null,
-        "property_name" : "libdefaults_dns_lookup_realm",
-        "property_type" : [ ],
-        "property_value" : "false",
-        "service_name" : "KERBEROS",
-        "stack_name" : "HDP",
-        "stack_version" : "2.2",
-        "type" : "krb5-conf.xml"
-      }
-    },
-    {
-      "href" : "http://c6403.ambari.apache.org:8080/api/v1/stacks/HDP/versions/2.2/services/KERBEROS/configurations/libdefaults_forwardable",
-      "StackConfigurations" : {
-        "final" : "false",
-        "property_description" : null,
-        "property_name" : "libdefaults_forwardable",
-        "property_type" : [ ],
-        "property_value" : "true",
-        "service_name" : "KERBEROS",
-        "stack_name" : "HDP",
-        "stack_version" : "2.2",
-        "type" : "krb5-conf.xml"
-      }
-    },
-    {
-      "href" : "http://c6403.ambari.apache.org:8080/api/v1/stacks/HDP/versions/2.2/services/KERBEROS/configurations/libdefaults_renew_lifetime",
-      "StackConfigurations" : {
-        "final" : "false",
-        "property_description" : null,
-        "property_name" : "libdefaults_renew_lifetime",
-        "property_type" : [ ],
-        "property_value" : "7d",
-        "service_name" : "KERBEROS",
-        "stack_name" : "HDP",
-        "stack_version" : "2.2",
-        "type" : "krb5-conf.xml"
-      }
-    },
-    {
-      "href" : "http://c6403.ambari.apache.org:8080/api/v1/stacks/HDP/versions/2.2/services/KERBEROS/configurations/libdefaults_ticket_lifetime",
-      "StackConfigurations" : {
-        "final" : "false",
-        "property_description" : null,
-        "property_name" : "libdefaults_ticket_lifetime",
-        "property_type" : [ ],
-        "property_value" : "24h",
-        "service_name" : "KERBEROS",
-        "stack_name" : "HDP",
-        "stack_version" : "2.2",
-        "type" : "krb5-conf.xml"
-      }
-    },
-    {
-      "href" : "http://c6403.ambari.apache.org:8080/api/v1/stacks/HDP/versions/2.2/services/KERBEROS/configurations/logging_admin_server",
-      "StackConfigurations" : {
-        "final" : "false",
-        "property_description" : null,
-        "property_name" : "logging_admin_server",
-        "property_type" : [ ],
-        "property_value" : "FILE:/var/log/kadmind.log",
-        "service_name" : "KERBEROS",
-        "stack_name" : "HDP",
-        "stack_version" : "2.2",
-        "type" : "krb5-conf.xml"
-      }
-    },
-    {
-      "href" : "http://c6403.ambari.apache.org:8080/api/v1/stacks/HDP/versions/2.2/services/KERBEROS/configurations/logging_default",
-      "StackConfigurations" : {
-        "final" : "false",
-        "property_description" : null,
-        "property_name" : "logging_default",
-        "property_type" : [ ],
-        "property_value" : "FILE:/var/log/krb5libs.log",
-        "service_name" : "KERBEROS",
-        "stack_name" : "HDP",
-        "stack_version" : "2.2",
-        "type" : "krb5-conf.xml"
-      }
-    },
-    {
-      "href" : "http://c6403.ambari.apache.org:8080/api/v1/stacks/HDP/versions/2.2/services/KERBEROS/configurations/logging_kdc",
-      "StackConfigurations" : {
-        "final" : "false",
-        "property_description" : null,
-        "property_name" : "logging_kdc",
-        "property_type" : [ ],
-        "property_value" : "FILE:/var/log/krb5kdc.log",
-        "service_name" : "KERBEROS",
-        "stack_name" : "HDP",
-        "stack_version" : "2.2",
-        "type" : "krb5-conf.xml"
-      }
-    },
-    {
       "href" : "http://c6403.ambari.apache.org:8080/api/v1/stacks/HDP/versions/2.2/services/KERBEROS/configurations/realm",
       "StackConfigurations" : {
         "final" : "false",

http://git-wip-us.apache.org/repos/asf/ambari/blob/8b4ef2b6/ambari-web/app/data/HDP2/site_properties.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/data/HDP2/site_properties.js b/ambari-web/app/data/HDP2/site_properties.js
index 3d1c3df..6762734 100644
--- a/ambari-web/app/data/HDP2/site_properties.js
+++ b/ambari-web/app/data/HDP2/site_properties.js
@@ -1888,7 +1888,7 @@ var hdp2properties = [
     "isRequiredByAgent": true,
     "displayType": "supportTextConnection",
     "serviceName": "KERBEROS",
-    "filename": "krb5-conf.xml",
+    "filename": "kerberos-env.xml",
     "category": "KDC",
     "index": 1
   },
@@ -1964,7 +1964,7 @@ var hdp2properties = [
     "isVisible": true,
     "isRequiredByAgent": true,
     "serviceName": "KERBEROS",
-    "filename": "krb5-conf.xml",
+    "filename": "kerberos-env.xml",
     "category": "Kadmin",
     "index": 0
   },
@@ -2037,36 +2037,6 @@ var hdp2properties = [
     "category": "Advanced krb5-conf",
     "index": 2
   },
-  {
-    "id": "puppet var",
-    "name": "libdefaults_default_tgs_enctypes",
-    "displayName": "libdefaults_default_tgs_enctypes",
-    "value": "",
-    "defaultValue": "",
-    "description": "",
-    "isOverridable": false,
-    "isVisible": true,
-    "isRequiredByAgent": true,
-    "isRequired": false,
-    "serviceName": "KERBEROS",
-    "filename": "krb5-conf.xml",
-    "category": "Advanced krb5-conf"
-  },
-  {
-    "id": "puppet var",
-    "name": "libdefaults_default_tkt_enctypes",
-    "displayName": "libdefaults_default_tkt_enctypes",
-    "value": "",
-    "defaultValue": "",
-    "description": "",
-    "isOverridable": false,
-    "isVisible": true,
-    "isRequiredByAgent": true,
-    "isRequired": false,
-    "serviceName": "KERBEROS",
-    "filename": "krb5-conf.xml",
-    "category": "Advanced krb5-conf"
-  },
 /********************************************* flume-agent *****************************/
   {
     "id": "site property",


[2/2] ambari git commit: AMBARI-9937. Ambari must support deployment on separate host (rlevas)

Posted by rl...@apache.org.
AMBARI-9937. Ambari must support deployment on separate host (rlevas)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/8b4ef2b6
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/8b4ef2b6
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/8b4ef2b6

Branch: refs/heads/branch-2.0.0
Commit: 8b4ef2b694b316a89f365d7329a9db4e2162f7c0
Parents: ba69c1d
Author: Robert Levas <rl...@hortonworks.com>
Authored: Mon Mar 9 15:06:55 2015 -0400
Committer: Robert Levas <rl...@hortonworks.com>
Committed: Mon Mar 9 15:06:55 2015 -0400

----------------------------------------------------------------------
 ambari-server/conf/unix/ambari.properties       |   3 +
 ambari-server/conf/windows/ambari.properties    |   3 +
 ambari-server/pom.xml                           |   7 +
 .../server/configuration/Configuration.java     |  12 +
 .../server/controller/KerberosHelper.java       |  63 ++--
 .../kerberos/CreateKeytabFilesServerAction.java | 305 ++++++++++++++-----
 .../kerberos/DestroyPrincipalsServerAction.java |  17 +-
 .../kerberos/KerberosActionDataFile.java        |   1 +
 .../kerberos/KerberosActionDataFileBuilder.java |  11 +-
 .../kerberos/KerberosOperationHandler.java      | 228 ++++++++++----
 .../kerberos/KerberosServerAction.java          |   4 -
 .../kerberos/MITKerberosOperationHandler.java   |  27 ++
 .../kerberos/KerberosKeytabDescriptor.java      |  33 ++
 .../python/ambari_server/serverConfiguration.py |   2 +
 .../1.10.3-10/configuration/kerberos-env.xml    |  18 +-
 .../1.10.3-10/configuration/krb5-conf.xml       | 114 +------
 .../1.10.3-10/package/scripts/params.py         |  36 +--
 .../1.10.3-10/package/templates/krb5_conf.j2    |  27 +-
 .../KERBEROS/configuration/krb5-conf.xml        |  92 +-----
 .../services/KERBEROS/package/scripts/params.py |  39 +--
 .../KERBEROS/package/templates/krb5_conf.j2     |  27 +-
 .../server/agent/TestHeartbeatHandler.java      |   2 +-
 .../server/controller/KerberosHelperTest.java   |  14 +-
 .../ADKerberosOperationHandlerTest.java         |   1 -
 .../kerberos/KerberosActionDataFileTest.java    |  10 +-
 .../kerberos/KerberosOperationHandlerTest.java  |  24 +-
 .../kerberos/KerberosServerActionTest.java      |   2 +-
 .../MITKerberosOperationHandlerTest.java        |   4 +-
 .../UpdateKerberosConfigsServerActionTest.java  |   2 +-
 .../python/stacks/2.2/KERBEROS/use_cases.py     |  36 +--
 .../journalnode-upgrade-hdfs-secure.json        |  24 +-
 .../stacks/2.2/configs/journalnode-upgrade.json |  24 +-
 .../2.2/configs/pig-service-check-secure.json   |  28 +-
 .../2.2/configs/ranger-admin-upgrade.json       |  26 +-
 .../2.2/configs/ranger-usersync-upgrade.json    |  28 +-
 .../wizard/stack/hdp/version2.0.1/KERBEROS.json | 148 +--------
 ambari-web/app/data/HDP2/site_properties.js     |  34 +--
 37 files changed, 720 insertions(+), 756 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/8b4ef2b6/ambari-server/conf/unix/ambari.properties
----------------------------------------------------------------------
diff --git a/ambari-server/conf/unix/ambari.properties b/ambari-server/conf/unix/ambari.properties
index ec51278..251f068 100644
--- a/ambari-server/conf/unix/ambari.properties
+++ b/ambari-server/conf/unix/ambari.properties
@@ -66,6 +66,9 @@ server.execution.scheduler.maxThreads=5
 server.execution.scheduler.maxDbConnections=5
 server.execution.scheduler.misfire.toleration.minutes=480
 
+# Kerberos settings
+kerberos.keytab.cache.dir = /var/lib/ambari-server/data/cache
+
 # Default timeout in seconds before task is killed
 agent.task.timeout=900
 # Default timeout in seconds before package installation task is killed

http://git-wip-us.apache.org/repos/asf/ambari/blob/8b4ef2b6/ambari-server/conf/windows/ambari.properties
----------------------------------------------------------------------
diff --git a/ambari-server/conf/windows/ambari.properties b/ambari-server/conf/windows/ambari.properties
index ff69f67..cfe9c3d 100644
--- a/ambari-server/conf/windows/ambari.properties
+++ b/ambari-server/conf/windows/ambari.properties
@@ -50,6 +50,9 @@ server.execution.scheduler.maxThreads=5
 server.execution.scheduler.maxDbConnections=5
 server.execution.scheduler.misfire.toleration.minutes=480
 
+# Kerberos settings
+kerberos.keytab.cache.dir = data\\cache
+
 recommendations.dir=\\var\\run\\ambari-server\\stack-recommendations
 stackadvisor.script=resources\\scripts\\stack_advisor.py
 server.tmp.dir=\\var\\run\\ambari-server\\tmp

http://git-wip-us.apache.org/repos/asf/ambari/blob/8b4ef2b6/ambari-server/pom.xml
----------------------------------------------------------------------
diff --git a/ambari-server/pom.xml b/ambari-server/pom.xml
index 2bbb0ee..7a13936 100644
--- a/ambari-server/pom.xml
+++ b/ambari-server/pom.xml
@@ -486,6 +486,12 @@
               <groupname>root</groupname>
             </mapping>
             <mapping>
+              <directory>/var/lib/ambari-server/data/cache</directory>
+              <filemode>700</filemode>
+              <username>root</username>
+              <groupname>root</groupname>
+            </mapping>
+            <mapping>
               <directory>/var/lib/ambari-server/resources/apps</directory>
               <filemode>755</filemode>
               <username>root</username>
@@ -667,6 +673,7 @@
                 <path>/var/log/ambari-server</path>
                 <path>/var/lib/ambari-server/resources/upgrade</path>
                 <path>/var/lib/ambari-server/data/tmp</path>
+                <path>/var/lib/ambari-server/data/cache</path>
               </paths>
             </data>
             <!-- TODO: should be included all subdirs, if exists-->

http://git-wip-us.apache.org/repos/asf/ambari/blob/8b4ef2b6/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java b/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java
index c5595e6..8060c80 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java
@@ -234,6 +234,8 @@ public class Configuration {
   public static final String KDC_PORT_KEY_DEFAULT = "88";
   public static final String KDC_CONNECTION_CHECK_TIMEOUT_KEY = "kdcserver.connection.check.timeout";
   public static final String KDC_CONNECTION_CHECK_TIMEOUT_DEFAULT = "10000";
+  public static final String KERBEROS_KEYTAB_CACHE_DIR_KEY = "kerberos.keytab.cache.dir";
+  public static final String KERBEROS_KEYTAB_CACHE_DIR_DEFAULT = "/var/lib/ambari-server/data/cache";
   /**
    * This key defines whether stages of parallel requests are executed in
    * parallel or sequentally. Only stages from different requests
@@ -1324,6 +1326,16 @@ public class Configuration {
   }
 
   /**
+   * Gets the directory where Ambari is to store cached keytab files.
+   *
+   * @return a File containing the path to the directory to use to store cached keytab files
+   */
+  public File getKerberosKeytabCacheDir() {
+    String fileName = properties.getProperty(KERBEROS_KEYTAB_CACHE_DIR_KEY, KERBEROS_KEYTAB_CACHE_DIR_DEFAULT);
+    return new File(fileName);
+  }
+
+  /**
    * Gets the type of database by examining the {@link #getDatabaseUrl()} JDBC
    * URL.
    *
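
Note on the accessor added above: getKerberosKeytabCacheDir() falls back to the compiled-in default (/var/lib/ambari-server/data/cache) when the property is unset. A short usage sketch, assuming an injected Configuration instance as elsewhere in this patch:

    // Resolve the keytab cache directory and make sure it exists before use.
    // (Illustrative only; the patch itself does this inside
    // CreateKeytabFilesServerAction.cacheKeytab().)
    File cacheDir = configuration.getKerberosKeytabCacheDir();
    if (!cacheDir.exists() && !cacheDir.mkdirs()) {
      throw new AmbariException("Failed to create keytab cache directory " + cacheDir.getAbsolutePath());
    }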

http://git-wip-us.apache.org/repos/asf/ambari/blob/8b4ef2b6/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelper.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelper.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelper.java
index e01d38d..cf73236 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelper.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelper.java
@@ -156,6 +156,9 @@ public class KerberosHelper {
   private ConfigHelper configHelper;
 
   @Inject
+  private Configuration configuration;
+
+  @Inject
   private KerberosOperationHandlerFactory kerberosOperationHandlerFactory;
 
   @Inject
@@ -191,7 +194,7 @@ public class KerberosHelper {
    * executed to complete this task; or null if no stages need to be executed.
    * @throws AmbariException
    * @throws KerberosInvalidConfigurationException if an issue occurs trying to get the
-   * Kerberos-specific configuration details
+   *                                               Kerberos-specific configuration details
    * @throws KerberosOperationException
    */
   public RequestStageContainer toggleKerberos(Cluster cluster, SecurityType securityType,
@@ -228,7 +231,7 @@ public class KerberosHelper {
    * @throws AmbariException
    * @throws KerberosOperationException
    * @throws KerberosInvalidConfigurationException if an issue occurs trying to get the
-   * Kerberos-specific configuration details
+   *                                               Kerberos-specific configuration details
    */
   public RequestStageContainer executeCustomOperations(Cluster cluster, Map<String, String> requestProperties,
                                                        RequestStageContainer requestStageContainer)
@@ -313,7 +316,7 @@ public class KerberosHelper {
       throws AmbariException, KerberosOperationException {
     return handle(cluster, getKerberosDetails(cluster), serviceComponentFilter, identityFilter,
         hostsToForceKerberosOperations, requestStageContainer, new CreatePrincipalsAndKeytabsHandler(false));
- }
+  }
 
   /**
    * Deletes the set of filtered principals and keytabs from the cluster.
@@ -455,7 +458,6 @@ public class KerberosHelper {
    * Validate the KDC admin credentials.
    *
    * @param cluster associated cluster
-   *
    * @throws AmbariException if any other error occurs while trying to validate the credentials
    */
   public void validateKDCCredentials(Cluster cluster) throws KerberosMissingAdminCredentialsException,
@@ -641,28 +643,28 @@ public class KerberosHelper {
    * need to be done.  Calls into the Handler implementation to provide guidance and set up stages
    * to perform the work needed to complete the relative action.
    *
-   * @param cluster                the relevant Cluster
-   * @param kerberosDetails        a KerberosDetails containing information about relevant Kerberos configuration
-   * @param serviceComponentFilter a Map of service names to component names indicating the relevant
-   *                               set of services and components - if null, no filter is relevant;
-   *                               if empty, the filter indicates no relevant services or components
-   * @param identityFilter         a Collection of identity names indicating the relevant identities -
-   *                               if null, no filter is relevant; if empty, the filter indicates no
-   *                               relevant identities
-   * @param requestStageContainer  a RequestStageContainer to place generated stages, if needed -
-   *                               if null a new RequestStageContainer will be created.
+   * @param cluster                        the relevant Cluster
+   * @param kerberosDetails                a KerberosDetails containing information about relevant Kerberos configuration
+   * @param serviceComponentFilter         a Map of service names to component names indicating the relevant
+   *                                       set of services and components - if null, no filter is relevant;
+   *                                       if empty, the filter indicates no relevant services or components
+   * @param identityFilter                 a Collection of identity names indicating the relevant identities -
+   *                                       if null, no filter is relevant; if empty, the filter indicates no
+   *                                       relevant identities
+   * @param requestStageContainer          a RequestStageContainer to place generated stages, if needed -
+   *                                       if null a new RequestStageContainer will be created.
    * @param hostsToForceKerberosOperations a set of host names on which it is expected that the
    *                                       Kerberos client is or will be in the INSTALLED state by
    *                                       the time the operations targeted for them are to be
    *                                       executed - if empty or null, then no hosts will be
    *                                       "forced"
-   * @param handler                a Handler to use to provide guidance and set up stages
-   *                               to perform the work needed to complete the relative action
+   * @param handler                        a Handler to use to provide guidance and set up stages
+   *                                       to perform the work needed to complete the relative action
    * @return the updated or a new RequestStageContainer containing the stages that need to be
    * executed to complete this task; or null if no stages need to be executed.
    * @throws AmbariException
    * @throws KerberosInvalidConfigurationException if an issue occurs trying to get the
-   * Kerberos-specific configuration details
+   *                                               Kerberos-specific configuration details
    */
   @Transactional
   private RequestStageContainer handle(Cluster cluster,
@@ -695,7 +697,7 @@ public class KerberosHelper {
 
         // Ensure that the hosts that should be assumed to be in the correct state when needed are
         // in the hostsWithValidKerberosClient collection.
-        if(hostsToForceKerberosOperations != null) {
+        if (hostsToForceKerberosOperations != null) {
           hostsWithValidKerberosClient.addAll(hostsToForceKerberosOperations);
         }
 
@@ -733,7 +735,7 @@ public class KerberosHelper {
                 // If the current ServiceComponentHost represents the KERBEROS/KERBEROS_CLIENT and
                 // indicates that the KERBEROS_CLIENT component is in the INSTALLED state, add the
                 // current host to the set of hosts that should be handled...
-                if(Service.Type.KERBEROS.name().equals(serviceName) &&
+                if (Service.Type.KERBEROS.name().equals(serviceName) &&
                     Role.KERBEROS_CLIENT.name().equals(componentName) &&
                     (sch.getState() == State.INSTALLED)) {
                   hostsWithValidKerberosClient.add(hostname);
@@ -915,7 +917,7 @@ public class KerberosHelper {
                                                    Map<String, String> commandParameters, RequestStageContainer requestStageContainer,
                                                    Handler handler) throws AmbariException, KerberosOperationException {
 
-    if(commandParameters == null) {
+    if (commandParameters == null) {
       throw new AmbariException("The properties map must not be null.  It is needed to store data related to the service check identity");
     }
 
@@ -969,6 +971,8 @@ public class KerberosHelper {
                       put("name", "${cluster-env/user_group}");
                       put("access", "r");
                     }});
+
+                    put("cachable", "false");
                   }
                 });
           }
@@ -1005,7 +1009,7 @@ public class KerberosHelper {
                 // If the current ServiceComponentHost represents the KERBEROS/KERBEROS_CLIENT and
                 // indicates that the KERBEROS_CLIENT component is in the INSTALLED state, add the
                 // current host to the set of hosts that should be handled...
-                if(Service.Type.KERBEROS.name().equals(serviceName) &&
+                if (Service.Type.KERBEROS.name().equals(serviceName) &&
                     Role.KERBEROS_CLIENT.name().equals(componentName) &&
                     (sch.getState() == State.INSTALLED)) {
                   hostsWithValidKerberosClient.add(hostname);
@@ -1023,7 +1027,7 @@ public class KerberosHelper {
 
                   if (identitiesAdded > 0) {
                     // Add the relevant principal name and keytab file data to the command params state
-                    if(!commandParameters.containsKey("principal_name") || !commandParameters.containsKey("keytab_file")) {
+                    if (!commandParameters.containsKey("principal_name") || !commandParameters.containsKey("keytab_file")) {
                       commandParameters.put("principal_name",
                           KerberosDescriptor.replaceVariables(identity.getPrincipalDescriptor().getValue(), configurations));
                       commandParameters.put("keytab_file",
@@ -1154,7 +1158,7 @@ public class KerberosHelper {
 
     KDCType kdcType;
     String kdcTypeProperty = kerberosEnvProperties.get("kdc_type");
-    if(kdcTypeProperty == null) {
+    if (kdcTypeProperty == null) {
       String message = "The 'kerberos-env/kdc_type' value must be set to a valid KDC type";
       LOG.error(message);
       throw new KerberosInvalidConfigurationException(message);
@@ -1290,7 +1294,12 @@ public class KerberosHelper {
    * @throws AmbariException if a new temporary directory cannot be created
    */
   private File createTemporaryDirectory() throws AmbariException {
-    String tempDirectoryPath = System.getProperty("java.io.tmpdir");
+    String tempDirectoryPath = configuration.getProperty(Configuration.SERVER_TMP_DIR_KEY);
+
+    if ((tempDirectoryPath == null) || tempDirectoryPath.isEmpty()) {
+      tempDirectoryPath = System.getProperty("java.io.tmpdir");
+    }
+
     try {
       if (tempDirectoryPath == null) {
         throw new IOException("The System property 'java.io.tmpdir' does not specify a temporary directory");
@@ -1316,8 +1325,7 @@ public class KerberosHelper {
       }
 
       return directory;
-    }
-    catch (IOException e) {
+    } catch (IOException e) {
       String message = "Failed to create the temporary data directory.";
       LOG.error(message, e);
       throw new AmbariException(message, e);
@@ -1451,7 +1459,8 @@ public class KerberosHelper {
                 keytabFileOwnerAccess,
                 keytabFileGroupName,
                 keytabFileGroupAccess,
-                keytabFileConfiguration);
+                keytabFileConfiguration,
+                (keytabDescriptor.isCachable()) ? "true" : "false");
 
             identitiesAdded++;
           }
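
The createTemporaryDirectory() change above makes the server prefer the configured server.tmp.dir over the JVM's java.io.tmpdir when staging Kerberos data. A condensed sketch of the fallback order (the directory name in the last line is illustrative, not taken from the patch):

    // Prefer the Ambari-configured temp dir; fall back to the JVM default.
    String tempDirectoryPath = configuration.getProperty(Configuration.SERVER_TMP_DIR_KEY);
    if ((tempDirectoryPath == null) || tempDirectoryPath.isEmpty()) {
      tempDirectoryPath = System.getProperty("java.io.tmpdir");
    }
    File directory = new File(tempDirectoryPath, "ambari-kerberos-" + System.currentTimeMillis());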

http://git-wip-us.apache.org/repos/asf/ambari/blob/8b4ef2b6/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/CreateKeytabFilesServerAction.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/CreateKeytabFilesServerAction.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/CreateKeytabFilesServerAction.java
index 6ea33b0..3e94cd6 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/CreateKeytabFilesServerAction.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/CreateKeytabFilesServerAction.java
@@ -22,20 +22,25 @@ import com.google.inject.Inject;
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.actionmanager.HostRoleStatus;
 import org.apache.ambari.server.agent.CommandReport;
+import org.apache.ambari.server.configuration.Configuration;
 import org.apache.ambari.server.orm.dao.KerberosPrincipalDAO;
 import org.apache.ambari.server.orm.dao.KerberosPrincipalHostDAO;
 import org.apache.ambari.server.orm.entities.KerberosPrincipalEntity;
 import org.apache.commons.codec.digest.DigestUtils;
-import org.apache.commons.io.FileUtils;
+import org.apache.directory.server.kerberos.shared.keytab.Keytab;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import java.io.File;
 import java.io.IOException;
+import java.util.HashMap;
+import java.util.HashSet;
 import java.util.Map;
+import java.util.Set;
 import java.util.concurrent.ConcurrentMap;
 
 import static org.apache.ambari.server.serveraction.kerberos.KerberosActionDataFile.HOSTNAME;
+import static org.apache.ambari.server.serveraction.kerberos.KerberosActionDataFile.KEYTAB_FILE_IS_CACHABLE;
 import static org.apache.ambari.server.serveraction.kerberos.KerberosActionDataFile.KEYTAB_FILE_PATH;
 
 /**
@@ -64,6 +69,18 @@ public class CreateKeytabFilesServerAction extends KerberosServerAction {
   private KerberosPrincipalHostDAO kerberosPrincipalHostDAO;
 
   /**
+   * Configuration used to get the configured properties such as the keytab file cache directory
+   */
+  @Inject
+  private Configuration configuration;
+
+  /**
+   * A map used to track which identities have already been processed, so that this action knows
+   * when to create a cached keytab file and when to reuse an existing one.
+   */
+  Map<String, Set<String>> visitedIdentities = new HashMap<String, Set<String>>();
+
+  /**
    * Called to execute this action.  Upon invocation, calls
    * {@link org.apache.ambari.server.serveraction.kerberos.KerberosServerAction#processIdentities(java.util.Map)} )}
    * to iterate through the Kerberos identity metadata and call
@@ -126,9 +143,7 @@ public class CreateKeytabFilesServerAction extends KerberosServerAction {
     CommandReport commandReport = null;
 
     if (identityRecord != null) {
-      String message = String.format("Creating keytab file for %s", evaluatedPrincipal);
-      LOG.info(message);
-      actionLog.writeStdOut(message);
+      String message;
 
       if (operationHandler == null) {
         message = String.format("Failed to create keytab file for %s, missing KerberosOperationHandler", evaluatedPrincipal);
@@ -143,84 +158,157 @@ public class CreateKeytabFilesServerAction extends KerberosServerAction {
         String keytabFilePath = identityRecord.get(KEYTAB_FILE_PATH);
 
         if ((host != null) && !host.isEmpty() && (keytabFilePath != null) && !keytabFilePath.isEmpty()) {
-          // Look up the current evaluatedPrincipal's password.
-          // If found create th keytab file, else skip it.
-          String password = principalPasswordMap.get(evaluatedPrincipal);
-
-          // Determine where to store the keytab file.  It should go into a host-specific
-          // directory under the previously determined data directory.
-          File hostDirectory = new File(getDataDirectoryPath(), host);
-
-          // Ensure the host directory exists...
-          if (hostDirectory.exists() || hostDirectory.mkdirs()) {
-            File keytabFile = new File(hostDirectory, DigestUtils.sha1Hex(keytabFilePath));
-
-            if (password == null) {
-              if (kerberosPrincipalHostDAO.exists(evaluatedPrincipal, host)) {
-                // There is nothing to do for this since it must already exist and we don't want to
-                // regenerate the keytab
-                message = String.format("Skipping keytab file for %s, missing password indicates nothing to do", evaluatedPrincipal);
-                LOG.debug(message);
-              } else {
-                KerberosPrincipalEntity principalEntity = kerberosPrincipalDAO.find(evaluatedPrincipal);
-                String cachedKeytabPath = (principalEntity == null) ? null : principalEntity.getCachedKeytabPath();
-
-                if (cachedKeytabPath == null) {
-                  message = String.format("Failed to create keytab file for %s, missing password", evaluatedPrincipal);
-                  actionLog.writeStdErr(message);
-                  LOG.error(message);
-                  commandReport = createCommandReport(1, HostRoleStatus.FAILED, "{}", actionLog.getStdOut(), actionLog.getStdErr());
+          Set<String> visitedPrincipalKeys = visitedIdentities.get(evaluatedPrincipal);
+          String visitationKey = String.format("%s|%s", host, keytabFilePath);
+
+          if ((visitedPrincipalKeys == null) || !visitedPrincipalKeys.contains(visitationKey)) {
+            // Look up the current evaluatedPrincipal's password.
+            // If found create the keytab file, else try to find it in the cache.
+            String password = principalPasswordMap.get(evaluatedPrincipal);
+
+            message = String.format("Creating keytab file for %s on host %s", evaluatedPrincipal, host);
+            LOG.info(message);
+            actionLog.writeStdOut(message);
+
+            // Determine where to store the keytab file.  It should go into a host-specific
+            // directory under the previously determined data directory.
+            File hostDirectory = new File(getDataDirectoryPath(), host);
+
+            // Ensure the host directory exists...
+            if (!hostDirectory.exists() && hostDirectory.mkdirs()) {
+              // Make sure only Ambari has access to this directory.
+              ensureAmbariOnlyAccess(hostDirectory);
+            }
+
+            if (hostDirectory.exists()) {
+              File destinationKeytabFile = new File(hostDirectory, DigestUtils.sha1Hex(keytabFilePath));
+
+              if (password == null) {
+                if (kerberosPrincipalHostDAO.exists(evaluatedPrincipal, host)) {
+                  // There is nothing to do for this since it must already exist and we don't want to
+                  // regenerate the keytab
+                  message = String.format("Skipping keytab file for %s, missing password indicates nothing to do", evaluatedPrincipal);
+                  LOG.debug(message);
                 } else {
+                  KerberosPrincipalEntity principalEntity = kerberosPrincipalDAO.find(evaluatedPrincipal);
+                  String cachedKeytabPath = (principalEntity == null) ? null : principalEntity.getCachedKeytabPath();
+
+                  if (cachedKeytabPath == null) {
+                    message = String.format("Failed to create keytab for %s, missing cached file", evaluatedPrincipal);
+                    actionLog.writeStdErr(message);
+                    LOG.error(message);
+                    commandReport = createCommandReport(1, HostRoleStatus.FAILED, "{}", actionLog.getStdOut(), actionLog.getStdErr());
+                  } else {
+                    try {
+                      operationHandler.createKeytabFile(new File(cachedKeytabPath), destinationKeytabFile);
+                    } catch (KerberosOperationException e) {
+                      message = String.format("Failed to create keytab file for %s - %s", evaluatedPrincipal, e.getMessage());
+                      actionLog.writeStdErr(message);
+                      LOG.error(message, e);
+                      commandReport = createCommandReport(1, HostRoleStatus.FAILED, "{}", actionLog.getStdOut(), actionLog.getStdErr());
+                    }
+                  }
+                }
+              } else {
+                Keytab keytab = null;
+
+                // Possibly get the keytab from the cache
+                if (visitedPrincipalKeys != null) {
+                  // Since we have visited this principal before, attempt to pull the keytab from the
+                  // cache...
+                  KerberosPrincipalEntity principalEntity = kerberosPrincipalDAO.find(evaluatedPrincipal);
+                  String cachedKeytabPath = (principalEntity == null) ? null : principalEntity.getCachedKeytabPath();
+
+                  if (cachedKeytabPath != null) {
+                    try {
+                      keytab = Keytab.read(new File(cachedKeytabPath));
+                    } catch (IOException e) {
+                      message = String.format("Failed to read the cached keytab for %s, recreating if possible - %s",
+                          evaluatedPrincipal, e.getMessage());
+
+                      if (LOG.isDebugEnabled()) {
+                        LOG.warn(message, e);
+                      } else {
+                        LOG.warn(message);
+                      }
+                    }
+                  }
+                }
+
+                // If the keytab was not retrieved from the cache... create it.
+                if (keytab == null) {
+                  Integer keyNumber = principalKeyNumberMap.get(evaluatedPrincipal);
+
                   try {
-                    FileUtils.copyFile(new File(cachedKeytabPath), keytabFile);
-                    message = String.format("Using cached keytab file for %s at %s", evaluatedPrincipal, keytabFile.getAbsolutePath());
-                    LOG.debug(message);
-                  } catch (IOException e) {
-                    message = String.format("Failed to use cached keytab file for %s at %s: %s", evaluatedPrincipal, keytabFile.getAbsolutePath(), e.getMessage());
+                    keytab = operationHandler.createKeytab(evaluatedPrincipal, password, keyNumber);
+
+                    // If the current identity does not represent a service, copy it to a secure location
+                    // and store that location so it can be reused rather than recreate it.
+                    KerberosPrincipalEntity principalEntity = kerberosPrincipalDAO.find(evaluatedPrincipal);
+                    if (principalEntity != null) {
+                      if (!principalEntity.isService() && ("true".equalsIgnoreCase(identityRecord.get(KEYTAB_FILE_IS_CACHABLE)))) {
+                        File cachedKeytabFile = cacheKeytab(evaluatedPrincipal, keytab);
+                        String previousCachedFilePath = principalEntity.getCachedKeytabPath();
+                        String cachedKeytabFilePath = ((cachedKeytabFile == null) || !cachedKeytabFile.exists())
+                            ? null
+                            : cachedKeytabFile.getAbsolutePath();
+
+                        principalEntity.setCachedKeytabPath(cachedKeytabFilePath);
+                        kerberosPrincipalDAO.merge(principalEntity);
+
+                        if (previousCachedFilePath != null) {
+                          if (!new File(previousCachedFilePath).delete()) {
+                            LOG.debug(String.format("Failed to remove orphaned cache file %s", previousCachedFilePath));
+                          }
+                        }
+                      }
+                    }
+                  } catch (KerberosOperationException e) {
+                    message = String.format("Failed to create keytab file for %s - %s", evaluatedPrincipal, e.getMessage());
                     actionLog.writeStdErr(message);
-                    LOG.warn(message);
+                    LOG.error(message, e);
                     commandReport = createCommandReport(1, HostRoleStatus.FAILED, "{}", actionLog.getStdOut(), actionLog.getStdErr());
                   }
                 }
-              }
-            } else {
-              Integer keyNumber = principalKeyNumberMap.get(evaluatedPrincipal);
 
-              try {
-                if (operationHandler.createKeytabFile(evaluatedPrincipal, password, keyNumber, keytabFile)) {
-                  message = String.format("Successfully created keytab file for %s at %s", evaluatedPrincipal, keytabFile.getAbsolutePath());
-                  LOG.debug(message);
+                if (keytab != null) {
+                  try {
+                    if (operationHandler.createKeytabFile(keytab, destinationKeytabFile)) {
+                      ensureAmbariOnlyAccess(destinationKeytabFile);
 
-                  // If the current identity does not represent a service, store the location of the
-                  // keytab file so it can be reused rather than recreate it.
-                  // Note: for now we are using the keytab's destination directory on the Ambari
-                  // server
-                  KerberosPrincipalEntity principalEntity = kerberosPrincipalDAO.find(evaluatedPrincipal);
-                  if (principalEntity != null) {
-                    if (!principalEntity.isService()) {
-                      principalEntity.setCachedKeytabPath(keytabFilePath);
-                      kerberosPrincipalDAO.merge(principalEntity);
+                      message = String.format("Successfully created keytab file for %s at %s", evaluatedPrincipal, destinationKeytabFile.getAbsolutePath());
+                      LOG.debug(message);
+                    } else {
+                      message = String.format("Failed to create keytab file for %s at %s", evaluatedPrincipal, destinationKeytabFile.getAbsolutePath());
+                      actionLog.writeStdErr(message);
+                      LOG.error(message);
+                      commandReport = createCommandReport(1, HostRoleStatus.FAILED, "{}", actionLog.getStdOut(), actionLog.getStdErr());
                     }
+                  } catch (KerberosOperationException e) {
+                    message = String.format("Failed to create keytab file for %s - %s", evaluatedPrincipal, e.getMessage());
+                    actionLog.writeStdErr(message);
+                    LOG.error(message, e);
+                    commandReport = createCommandReport(1, HostRoleStatus.FAILED, "{}", actionLog.getStdOut(), actionLog.getStdErr());
                   }
-                } else {
-                  message = String.format("Failed to create keytab file for %s at %s", evaluatedPrincipal, keytabFile.getAbsolutePath());
-                  actionLog.writeStdErr(message);
-                  LOG.error(message);
-                  commandReport = createCommandReport(1, HostRoleStatus.FAILED, "{}", actionLog.getStdOut(), actionLog.getStdErr());
                 }
-              } catch (KerberosOperationException e) {
-                message = String.format("Failed to create keytab file for %s - %s", evaluatedPrincipal, e.getMessage());
-                actionLog.writeStdErr(message);
-                LOG.error(message, e);
-                commandReport = createCommandReport(1, HostRoleStatus.FAILED, "{}", actionLog.getStdOut(), actionLog.getStdErr());
               }
+            } else {
+              message = String.format("Failed to create keytab file for %s, the container directory does not exist: %s",
+                  evaluatedPrincipal, hostDirectory.getAbsolutePath());
+              actionLog.writeStdErr(message);
+              LOG.error(message);
+              commandReport = createCommandReport(1, HostRoleStatus.FAILED, "{}", actionLog.getStdOut(), actionLog.getStdErr());
             }
-          } else {
-            message = String.format("Failed to create keytab file for %s, the container directory does not exist: %s",
-                evaluatedPrincipal, hostDirectory.getAbsolutePath());
-            actionLog.writeStdErr(message);
-            LOG.error(message);
-            commandReport = createCommandReport(1, HostRoleStatus.FAILED, "{}", actionLog.getStdOut(), actionLog.getStdErr());
+
+            if (visitedPrincipalKeys == null) {
+              visitedPrincipalKeys = new HashSet<String>();
+              visitedIdentities.put(evaluatedPrincipal, visitedPrincipalKeys);
+            }
+
+            visitedPrincipalKeys.add(visitationKey);
+          } else {
+            LOG.debug(String.format("Skipping previously processed keytab for %s on host %s", evaluatedPrincipal, host));
           }
         }
       }
@@ -228,4 +316,83 @@ public class CreateKeytabFilesServerAction extends KerberosServerAction {
 
     return commandReport;
   }
+
+  /**
+   * Cache a keytab given its relative principal name and the keytab data.
+   * <p/>
+   * The specified keytab is stored in a file in a location derived using the configured keytab
+   * cache directory and a time-seeded hash of the principal name; this adds a slight level of
+   * obscurity so that the keytab data in a given file cannot be determined from its name.
+   * The file is then set readable by only the Ambari server process owner.
+   *
+   * @param principal the principal name related to the keytab data
+   * @param keytab    the keytab data to cache
+   * @return a File pointing to the cached keytab file
+   * @throws AmbariException if a failure occurs while creating the cache file containing the keytab data
+   */
+  private File cacheKeytab(String principal, Keytab keytab) throws AmbariException {
+    File cacheDirectory = configuration.getKerberosKeytabCacheDir();
+
+    if (cacheDirectory == null) {
+      String message = "The Kerberos keytab cache directory is not configured in the Ambari properties";
+      LOG.error(message);
+      throw new AmbariException(message);
+    }
+
+    if (!cacheDirectory.exists()) {
+      // If the cache directory does not exist, create it and ensure only Ambari has access to it
+      if (cacheDirectory.mkdirs()) {
+        ensureAmbariOnlyAccess(cacheDirectory);
+
+        if (!cacheDirectory.exists()) {
+          String message = String.format("Failed to create the keytab cache directory %s",
+              cacheDirectory.getAbsolutePath());
+          LOG.error(message);
+          throw new AmbariException(message);
+        }
+      }
+    }
+
+    File cachedKeytabFile = new File(cacheDirectory, DigestUtils.sha1Hex(principal + String.valueOf(System.currentTimeMillis())));
+
+    try {
+      keytab.write(cachedKeytabFile);
+      ensureAmbariOnlyAccess(cachedKeytabFile);
+    } catch (IOException e) {
+      String message = String.format("Failed to write the keytab for %s to the cache location (%s)",
+          principal, cachedKeytabFile.getAbsolutePath());
+      LOG.error(message, e);
+      throw new AmbariException(message, e);
+    }
+
+    return cachedKeytabFile;
+  }
+
+  /**
+   * Ensures that the owner of the Ambari server process is the only local user account able to
+   * read and write to the specified file or read, write to, and execute the specified directory.
+   *
+   * @param file the file or directory for which to modify access
+   */
+  private void ensureAmbariOnlyAccess(File file) {
+    if (file.exists()) {
+      if (!file.setReadable(false, false) || !file.setReadable(true, true)) {
+        LOG.warn(String.format("Failed to set %s readable only by Ambari", file.getAbsolutePath()));
+      }
+
+      if (!file.setWritable(false, false) || !file.setWritable(true, true)) {
+        LOG.warn(String.format("Failed to set %s writable only by Ambari", file.getAbsolutePath()));
+      }
+
+      if (file.isDirectory()) {
+        if (!file.setExecutable(false, false) && !file.setExecutable(true, true)) {
+          LOG.warn(String.format("Failed to set %s executable by Ambari", file.getAbsolutePath()));
+        }
+      } else {
+        if (!file.setExecutable(false, false)) {
+          LOG.warn(String.format("Failed to set %s not executable", file.getAbsolutePath()));
+        }
+      }
+    }
+  }
 }
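
The visitedIdentities bookkeeping above guarantees that a keytab is generated at most once per principal/host/keytab-path combination, even when the identity metadata lists the same principal several times. A self-contained sketch of the same pattern (class and method names are illustrative, not part of the patch):

    import java.util.HashMap;
    import java.util.HashSet;
    import java.util.Map;
    import java.util.Set;

    public class VisitationTracker {
      private final Map<String, Set<String>> visited = new HashMap<String, Set<String>>();

      // Returns true the first time a (principal, host, keytab path) triple is
      // seen; later calls with the same triple return false and can be skipped.
      public boolean firstVisit(String principal, String host, String keytabFilePath) {
        String visitationKey = host + "|" + keytabFilePath;
        Set<String> keys = visited.get(principal);
        if (keys == null) {
          keys = new HashSet<String>();
          visited.put(principal, keys);
        }
        return keys.add(visitationKey);
      }
    }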
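
ensureAmbariOnlyAccess() tightens permissions with the java.io.File setters, which report success one call at a time. On a Java 7+ JVM over a POSIX file system, the same owner-only policy could be expressed more directly; this is an alternative sketch, not what the patch uses:

    import java.io.IOException;
    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.nio.file.attribute.PosixFilePermission;
    import java.nio.file.attribute.PosixFilePermissions;
    import java.util.Set;

    // rwx------ for directories (traversal needs execute), rw------- for files.
    static void ensureOwnerOnlyAccess(Path path) throws IOException {
      Set<PosixFilePermission> permissions = Files.isDirectory(path)
          ? PosixFilePermissions.fromString("rwx------")
          : PosixFilePermissions.fromString("rw-------");
      Files.setPosixFilePermissions(path, permissions);
    }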

http://git-wip-us.apache.org/repos/asf/ambari/blob/8b4ef2b6/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/DestroyPrincipalsServerAction.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/DestroyPrincipalsServerAction.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/DestroyPrincipalsServerAction.java
index caf8c78..a215a56 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/DestroyPrincipalsServerAction.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/DestroyPrincipalsServerAction.java
@@ -22,9 +22,11 @@ import com.google.inject.Inject;
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.agent.CommandReport;
 import org.apache.ambari.server.orm.dao.KerberosPrincipalDAO;
+import org.apache.ambari.server.orm.entities.KerberosPrincipalEntity;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import java.io.File;
 import java.util.Map;
 import java.util.concurrent.ConcurrentMap;
 
@@ -95,7 +97,20 @@ public class DestroyPrincipalsServerAction extends KerberosServerAction {
     }
 
     try {
-      kerberosPrincipalDAO.remove(evaluatedPrincipal);
+      KerberosPrincipalEntity principalEntity = kerberosPrincipalDAO.find(evaluatedPrincipal);
+
+      if (principalEntity != null) {
+        String cachedKeytabPath = principalEntity.getCachedKeytabPath();
+
+        kerberosPrincipalDAO.remove(principalEntity);
+
+        // If a cached keytab file exists for this principal, delete it.
+        if (cachedKeytabPath != null) {
+          if (!new File(cachedKeytabPath).delete()) {
+            LOG.debug(String.format("Failed to remove cached keytab for %s", evaluatedPrincipal));
+          }
+        }
+      }
     }
     catch (Throwable t) {
       message = String.format("Failed to remove identity for %s from the Ambari database - %s", evaluatedPrincipal, t.getMessage());
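
The change above pairs database removal with cache cleanup. Condensed restatement of the ordering it relies on (names as in the patch): the cached keytab path must be read before the entity is removed, and file deletion is best effort, logged at DEBUG, since a missing cache file leaves the same end state as a deleted one.

    String cachedKeytabPath = principalEntity.getCachedKeytabPath();
    kerberosPrincipalDAO.remove(principalEntity);
    if ((cachedKeytabPath != null) && !new File(cachedKeytabPath).delete()) {
      LOG.debug(String.format("Failed to remove cached keytab for %s", evaluatedPrincipal));
    }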

http://git-wip-us.apache.org/repos/asf/ambari/blob/8b4ef2b6/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/KerberosActionDataFile.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/KerberosActionDataFile.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/KerberosActionDataFile.java
index 40b3353..e85048d 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/KerberosActionDataFile.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/KerberosActionDataFile.java
@@ -37,4 +37,5 @@ public class KerberosActionDataFile {
   public static final String KEYTAB_FILE_GROUP_NAME = "keytab_file_group_name";
   public static final String KEYTAB_FILE_GROUP_ACCESS = "keytab_file_group_access";
   public static final String KEYTAB_FILE_CONFIGURATION = "keytab_file_configuration";
+  public static final String KEYTAB_FILE_IS_CACHABLE = "keytab_file_is_cachable";
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/8b4ef2b6/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/KerberosActionDataFileBuilder.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/KerberosActionDataFileBuilder.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/KerberosActionDataFileBuilder.java
index 8888f82..31e62be 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/KerberosActionDataFileBuilder.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/KerberosActionDataFileBuilder.java
@@ -70,13 +70,16 @@ public class KerberosActionDataFileBuilder extends AbstractKerberosDataFileBuild
    *                                (expected to be the type and name of the configuration property
    *                                to use to store the keytab file's absolute path in
    *                                - i.e., config-type/property)
+   * @param keytabFileCanCache      a String containing a boolean value (true, false) indicating
+   *                                whether the generated keytab can be cached or not
    * @throws IOException
    */
   public void addRecord(String hostName, String serviceName, String serviceComponentName,
                         String principal, String principalType, String principalConfiguration,
                         String keytabFilePath, String keytabFileOwnerName,
                         String keytabFileOwnerAccess, String keytabFileGroupName,
-                        String keytabFileGroupAccess, String keytabFileConfiguration)
+                        String keytabFileGroupAccess, String keytabFileConfiguration,
+                        String keytabFileCanCache)
       throws IOException {
     super.appendRecord(hostName,
         serviceName,
@@ -89,7 +92,8 @@ public class KerberosActionDataFileBuilder extends AbstractKerberosDataFileBuild
         keytabFileOwnerAccess,
         keytabFileGroupName,
         keytabFileGroupAccess,
-        keytabFileConfiguration);
+        keytabFileConfiguration,
+        keytabFileCanCache);
   }
 
   @Override
@@ -105,6 +109,7 @@ public class KerberosActionDataFileBuilder extends AbstractKerberosDataFileBuild
         KEYTAB_FILE_OWNER_ACCESS,
         KEYTAB_FILE_GROUP_NAME,
         KEYTAB_FILE_GROUP_ACCESS,
-        KEYTAB_FILE_CONFIGURATION);
+        KEYTAB_FILE_CONFIGURATION,
+        KEYTAB_FILE_IS_CACHABLE);
   }
 }

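To illustrate the widened record, here is a self-contained sketch of the thirteen columns an index entry now carries, ending with the new keytab_file_is_cachable value. This is only an approximation: the real builder inherits header and escaping handling from AbstractKerberosDataFileBuilder, while this sketch simply joins the values.

    import java.io.FileWriter;
    import java.io.IOException;

    public class DataFileRecordSketch {
      public static void main(String[] args) throws IOException {
        String[] record = {
            "c6401.ambari.apache.org",                    // hostname
            "HDFS",                                       // service
            "DATANODE",                                   // component
            "dn/_HOST@_REALM",                            // principal
            "service",                                    // principal_type
            "hdfs-site/dfs.namenode.kerberos.principal",  // principal_configuration
            "/etc/security/keytabs/dn.service.keytab",    // keytab_file_path
            "hdfs", "r",                                  // keytab file owner name/access
            "hadoop", "",                                 // keytab file group name/access
            "hdfs-site/dfs.namenode.keytab.file",         // keytab_file_configuration
            "false"                                       // keytab_file_is_cachable
        };
        FileWriter writer = new FileWriter("index.dat");
        try {
          writer.write(String.join(",", record));
          writer.write(System.lineSeparator());
        } finally {
          writer.close();
        }
      }
    }
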
http://git-wip-us.apache.org/repos/asf/ambari/blob/8b4ef2b6/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/KerberosOperationHandler.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/KerberosOperationHandler.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/KerberosOperationHandler.java
index b62f6f9..d5384d2 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/KerberosOperationHandler.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/KerberosOperationHandler.java
@@ -40,6 +40,7 @@ import java.util.Collections;
 import java.util.EnumSet;
 import java.util.HashMap;
 import java.util.HashSet;
+import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
@@ -80,6 +81,16 @@ public abstract class KerberosOperationHandler {
   public final static String KERBEROS_ENV_ENCRYPTION_TYPES = "encryption_types";
 
   /**
+   * Kerberos-env configuration property name: kdc_host
+   */
+  public final static String KERBEROS_ENV_KDC_HOST = "kdc_host";
+
+  /**
+   * Kerberos-env configuration property name: admin_server_host
+   */
+  public final static String KERBEROS_ENV_ADMIN_SERVER_HOST = "admin_server_host";
+
+  /**
    * The set of available characters to use when generating a secure password
    */
   private final static char[] SECURE_PASSWORD_CHARS =
@@ -317,94 +328,185 @@ public abstract class KerberosOperationHandler {
   }
 
   /**
-   * Create or append to a keytab file using the specified principal and password.
+   * Create a keytab using the specified principal and password.
    *
-   * @param principal  a String containing the principal to test
-   * @param password   a String containing the password to use when creating the principal
-   * @param keytabFile a File containing the absolute path to the keytab file
-   * @return true if the keytab file was successfully created; false otherwise
+   * @param principal a String containing the principal for which to create the keytab
+   * @param password  a String containing the password to use when creating the principal
+   * @param keyNumber an Integer indicating the key number for the keytab entries
+   * @return the created Keytab
    * @throws KerberosOperationException
    */
-  public boolean createKeytabFile(String principal, String password, Integer keyNumber, File keytabFile)
+  protected Keytab createKeytab(String principal, String password, Integer keyNumber)
       throws KerberosOperationException {
-    boolean success = false;
 
     if ((principal == null) || principal.isEmpty()) {
       throw new KerberosOperationException("Failed to create keytab file, missing principal");
-    } else if (password == null) {
+    }
+
+    if (password == null) {
       throw new KerberosOperationException(String.format("Failed to create keytab file for %s, missing password", principal));
-    } else if (keytabFile == null) {
-      throw new KerberosOperationException(String.format("Failed to create keytab file for %s, missing file path", principal));
-    } else {
-      Keytab keytab;
-      Set<EncryptionType> ciphers = new HashSet<EncryptionType>(keyEncryptionTypes);
-      List<KeytabEntry> keytabEntries = new ArrayList<KeytabEntry>();
-
-      if (keytabFile.exists() && keytabFile.canRead() && (keytabFile.length() > 0)) {
-        // If the keytab file already exists, read it in and append the new keytabs to it so that
-        // potentially important data is not lost
-        try {
-          keytab = Keytab.read(keytabFile);
-        } catch (IOException e) {
-          // There was an issue reading in the existing keytab file... we might loose some keytabs
-          // but that is unlikely...
-          keytab = new Keytab();
-        }
+    }
 
-        // In case there were any existing keytab entries, add them to the new entries list so
-        // they are not lost.  While at it, remove ciphers that already exist for the given principal
-        // so duplicate entries aren't added to the file.
-        List<KeytabEntry> existingEntries = keytab.getEntries();
-        if ((existingEntries != null) && !existingEntries.isEmpty()) {
+    Set<EncryptionType> ciphers = new HashSet<EncryptionType>(keyEncryptionTypes);
+    List<KeytabEntry> keytabEntries = new ArrayList<KeytabEntry>();
+    Keytab keytab = new Keytab();
 
-          for (KeytabEntry entry : existingEntries) {
-            // Remove ciphers that will cause duplicate entries
-            if (principal.equals(entry.getPrincipalName())) {
-              ciphers.remove(entry.getKey().getKeyType());
-            }
 
-            keytabEntries.add(entry);
-          }
+    if (!ciphers.isEmpty()) {
+      // Create a set of keys and relevant keytab entries
+      Map<EncryptionType, EncryptionKey> keys = KerberosKeyFactory.getKerberosKeys(principal, password, ciphers);
+
+      if (keys != null) {
+        byte keyVersion = (keyNumber == null) ? 0 : keyNumber.byteValue();
+        KerberosTime timestamp = new KerberosTime();
+
+        for (EncryptionKey encryptionKey : keys.values()) {
+          keytabEntries.add(new KeytabEntry(principal, 1, timestamp, keyVersion, encryptionKey));
         }
-      } else {
-        keytab = new Keytab();
+
+        keytab.setEntries(keytabEntries);
       }
+    }
 
-      if (ciphers.isEmpty()) {
-        // There are no new keys to create
-        success = true;
-      } else {
-        // Create a set of keys and relevant keytab entries
-        Map<EncryptionType, EncryptionKey> keys = KerberosKeyFactory.getKerberosKeys(principal, password, ciphers);
+    return keytab;
+  }
 
-        if (keys != null) {
-          byte keyVersion = (keyNumber == null) ? 0 : keyNumber.byteValue();
-          KerberosTime timestamp = new KerberosTime();
+  /**
+   * Create or append to a keytab file using keytab data from another keytab file.
+   * <p/>
+   * If the destination keytab file contains keytab data, that data will be merged with the new data
+   * to create a composite set of keytab entries.
+   *
+   * @param sourceKeytabFile      a File containing the absolute path to the file with the keytab data to store
+   * @param destinationKeytabFile a File containing the absolute path to where the keytab data is to be stored
+   * @return true if the keytab file was successfully created; false otherwise
+   * @throws KerberosOperationException
+   * @see #createKeytabFile(org.apache.directory.server.kerberos.shared.keytab.Keytab, java.io.File)
+   */
+  protected boolean createKeytabFile(File sourceKeytabFile, File destinationKeytabFile)
+      throws KerberosOperationException {
+    return createKeytabFile(readKeytabFile(sourceKeytabFile), destinationKeytabFile);
+  }
 
-          for (EncryptionKey encryptionKey : keys.values()) {
-            keytabEntries.add(new KeytabEntry(principal, 1, timestamp, keyVersion, encryptionKey));
-          }
+  /**
+   * Create or append to a keytab file using the specified principal and password.
+   * <p/>
+   * If the destination keytab file contains keytab data, that data will be merged with the new data
+   * to create a composite set of keytab entries.
+   *
+   * @param principal             a String containing the principal for which to create the keytab
+   * @param password              a String containing the password to use when creating the principal
+   * @param keyNumber             an Integer indicating the relevant key number to use for the keytab entries
+   * @param destinationKeytabFile a File containing the absolute path to where the keytab data is to be stored
+   * @return true if the keytab file was successfully created; false otherwise
+   * @throws KerberosOperationException
+   * @see #createKeytabFile(org.apache.directory.server.kerberos.shared.keytab.Keytab, java.io.File)
+   */
+  protected boolean createKeytabFile(String principal, String password, Integer keyNumber, File destinationKeytabFile)
+      throws KerberosOperationException {
+    return createKeytabFile(createKeytab(principal, password, keyNumber), destinationKeytabFile);
+  }
 
-          keytab.setEntries(keytabEntries);
+  /**
+   * Create or append to a keytab file using the specified Keytab.
+   * <p/>
+   * If the destination keytab file contains keytab data, that data will be merged with the new data
+   * to create a composite set of keytab entries.
+   *
+   * @param keytab                the Keytab containing the data to add to the keytab file
+   * @param destinationKeytabFile a File containing the absolute path to where the keytab data is to be stored
+   * @return true if the keytab file was successfully created; false otherwise
+   * @throws KerberosOperationException
+   */
+  protected boolean createKeytabFile(Keytab keytab, File destinationKeytabFile)
+      throws KerberosOperationException {
 
-          try {
-            keytab.write(keytabFile);
-            success = true;
-          } catch (IOException e) {
-            String message = String.format("Failed to export keytab file for %s", principal);
-            LOG.error(message, e);
+    if (destinationKeytabFile == null) {
+      throw new KerberosOperationException("The destination file path is null");
+    }
+
+    try {
+      mergeKeytabs(readKeytabFile(destinationKeytabFile), keytab).write(destinationKeytabFile);
+      return true;
+    } catch (IOException e) {
+      String message = "Failed to export keytab file";
+      LOG.error(message, e);
 
-            if (!keytabFile.delete()) {
-              keytabFile.deleteOnExit();
-            }
+      if (!destinationKeytabFile.delete()) {
+        destinationKeytabFile.deleteOnExit();
+      }
 
-            throw new KerberosOperationException(message, e);
+      throw new KerberosOperationException(message, e);
+    }
+  }
+
+  /**
+   * Merge the keytab data from one keytab with the keytab data from a different keytab.
+   * <p/>
+   * If similar key entries exist for the same principal, the updated values will be used.
+   *
+   * @param keytab  a Keytab with the base keytab data
+   * @param updates a Keytab containing the updated keytab data
+   * @return a Keytab with the merged data
+   */
+  protected Keytab mergeKeytabs(Keytab keytab, Keytab updates) {
+    List<KeytabEntry> keytabEntries = (keytab == null)
+        ? Collections.<KeytabEntry>emptyList()
+        : new ArrayList<KeytabEntry>(keytab.getEntries());
+    List<KeytabEntry> updateEntries = (updates == null)
+        ? Collections.<KeytabEntry>emptyList()
+        : new ArrayList<KeytabEntry>(updates.getEntries());
+    List<KeytabEntry> mergedEntries = new ArrayList<KeytabEntry>();
+
+    if (keytabEntries.isEmpty()) {
+      mergedEntries.addAll(updateEntries);
+    } else if (updateEntries.isEmpty()) {
+      mergedEntries.addAll(keytabEntries);
+    } else {
+      Iterator<KeytabEntry> iterator = keytabEntries.iterator();
+
+      while (iterator.hasNext()) {
+        KeytabEntry keytabEntry = iterator.next();
+
+        for (KeytabEntry entry : updateEntries) {
+          if (entry.getPrincipalName().equals(keytabEntry.getPrincipalName()) &&
+              entry.getKey().getKeyType().equals(keytabEntry.getKey().getKeyType())) {
+            iterator.remove();
+            break;
           }
         }
       }
+
+      mergedEntries.addAll(keytabEntries);
+      mergedEntries.addAll(updateEntries);
+    }
+
+    Keytab mergedKeytab = new Keytab();
+    mergedKeytab.setEntries(mergedEntries);
+    return mergedKeytab;
+  }
+
+  /**
+   * Reads a file containing keytab data into a new Keytab.
+   *
+   * @param file a File containing the path to the file from which to read keytab data
+   * @return a Keytab, or null if the file was not readable
+   */
+  protected Keytab readKeytabFile(File file) {
+    Keytab keytab;
+
+    if (file.exists() && file.canRead() && (file.length() > 0)) {
+      try {
+        keytab = Keytab.read(file);
+      } catch (IOException e) {
+        // There was an issue reading in the existing keytab file... quietly assume no data
+        keytab = null;
+      }
+    } else {
+      keytab = null;
     }
 
-    return success;
+    return keytab;
   }
 
   public KerberosCredential getAdministratorCredentials() {

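The merge semantics introduced by mergeKeytabs() are worth spelling out: an entry in the base keytab is dropped whenever the updates keytab carries an entry for the same principal and encryption type, so updated keys always win. A standalone sketch of just that rule, with a hypothetical Entry class standing in for org.apache.directory.server.kerberos.shared.keytab.KeytabEntry:

    import java.util.ArrayList;
    import java.util.Iterator;
    import java.util.List;

    public class KeytabMergeSketch {

      // Hypothetical stand-in for KeytabEntry.
      static class Entry {
        final String principal;
        final String keyType; // e.g. "aes256-cts-hmac-sha1-96"
        final int kvno;
        Entry(String principal, String keyType, int kvno) {
          this.principal = principal;
          this.keyType = keyType;
          this.kvno = kvno;
        }
        @Override
        public String toString() {
          return principal + "/" + keyType + "#kvno=" + kvno;
        }
      }

      static List<Entry> merge(List<Entry> base, List<Entry> updates) {
        List<Entry> merged = new ArrayList<Entry>(base);
        // Drop base entries superseded by an update for the same
        // (principal, key type) pair, then append all updates.
        Iterator<Entry> iterator = merged.iterator();
        while (iterator.hasNext()) {
          Entry existing = iterator.next();
          for (Entry update : updates) {
            if (existing.principal.equals(update.principal)
                && existing.keyType.equals(update.keyType)) {
              iterator.remove();
              break;
            }
          }
        }
        merged.addAll(updates);
        return merged;
      }

      public static void main(String[] args) {
        List<Entry> base = new ArrayList<Entry>();
        base.add(new Entry("nn/host1@EXAMPLE.COM", "aes256-cts-hmac-sha1-96", 1));
        List<Entry> updates = new ArrayList<Entry>();
        updates.add(new Entry("nn/host1@EXAMPLE.COM", "aes256-cts-hmac-sha1-96", 2));
        System.out.println(merge(base, updates)); // only the kvno=2 entry survives
      }
    }
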
http://git-wip-us.apache.org/repos/asf/ambari/blob/8b4ef2b6/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/KerberosServerAction.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/KerberosServerAction.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/KerberosServerAction.java
index fc1729b..73a4ad6 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/KerberosServerAction.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/KerberosServerAction.java
@@ -491,10 +491,6 @@ public abstract class KerberosServerAction extends AbstractServerAction {
         // by replacing the _HOST and _REALM variables.
         String evaluatedPrincipal = principal.replace("_HOST", host).replace("_REALM", defaultRealm);
 
-        String message = String.format("Processing identity for %s", evaluatedPrincipal);
-        actionLog.writeStdOut(message);
-        LOG.info(message);
-
         commandReport = processIdentity(record, evaluatedPrincipal, operationHandler, requestSharedDataContext);
       }
     }

http://git-wip-us.apache.org/repos/asf/ambari/blob/8b4ef2b6/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/MITKerberosOperationHandler.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/MITKerberosOperationHandler.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/MITKerberosOperationHandler.java
index 0b9227f..69b0292 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/MITKerberosOperationHandler.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/MITKerberosOperationHandler.java
@@ -54,6 +54,8 @@ public class MITKerberosOperationHandler extends KerberosOperationHandler {
   @Inject
   private Configuration configuration;
 
+  private String adminServerHost = null;
+
   /**
    * Prepares and creates resources to be used by this KerberosOperationHandler
    * <p/>
@@ -80,6 +82,7 @@ public class MITKerberosOperationHandler extends KerberosOperationHandler {
 
     if (kerberosConfiguration != null) {
       setKeyEncryptionTypes(translateEncryptionTypes(kerberosConfiguration.get(KERBEROS_ENV_ENCRYPTION_TYPES), "\\s+"));
+      setAdminServerHost(kerberosConfiguration.get(KERBEROS_ENV_ADMIN_SERVER_HOST));
     }
 
     setOpen(true);
@@ -339,6 +342,12 @@ public class MITKerberosOperationHandler extends KerberosOperationHandler {
         // Set the kadmin interface to be kadmin
         command.add(pathToCommand + "kadmin");
 
+        // Add explicit KDC admin host, if available
+        if (getAdminServerHost() != null) {
+          command.add("-s");
+          command.add(getAdminServerHost());
+        }
+
         // Add the administrative principal
         command.add("-p");
         command.add(adminPrincipal);
@@ -439,4 +448,22 @@ public class MITKerberosOperationHandler extends KerberosOperationHandler {
 
     return result;
   }
+
+  /**
+   * Sets the KDC administrator server host address
+   *
+   * @param adminServerHost the IP address or FQDN of the KDC administrator server
+   */
+  public void setAdminServerHost(String adminServerHost) {
+    this.adminServerHost = adminServerHost;
+  }
+
+  /**
+   * Gets the IP address or FQDN of the KDC administrator server
+   *
+   * @return the IP address or FQDN of the KDC administrator server
+   */
+  public String getAdminServerHost() {
+    return adminServerHost;
+  }
 }

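The practical effect of the new "-s" handling is easiest to see in the assembled command line. A sketch under illustrative values (the binary path and admin principal are assumptions, not taken from the patch):

    import java.util.ArrayList;
    import java.util.List;

    public class KadminCommandSketch {
      public static void main(String[] args) {
        String adminServerHost = "kdc.example.com"; // from kerberos-env/admin_server_host
        String adminPrincipal = "admin/admin@EXAMPLE.COM";

        List<String> command = new ArrayList<String>();
        command.add("/usr/bin/kadmin");
        // Pin kadmin to the configured admin server instead of relying on krb5.conf.
        if (adminServerHost != null) {
          command.add("-s");
          command.add(adminServerHost);
        }
        command.add("-p");
        command.add(adminPrincipal);

        // Prints: /usr/bin/kadmin -s kdc.example.com -p admin/admin@EXAMPLE.COM
        System.out.println(String.join(" ", command));
      }
    }
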
http://git-wip-us.apache.org/repos/asf/ambari/blob/8b4ef2b6/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/KerberosKeytabDescriptor.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/KerberosKeytabDescriptor.java b/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/KerberosKeytabDescriptor.java
index 59e6104..79537d4 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/KerberosKeytabDescriptor.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/KerberosKeytabDescriptor.java
@@ -31,6 +31,7 @@ import java.util.Map;
  * <li>owner {name, access}</li>
  * <li>group {name, access}</li>
  * <li>configuration</li>
+ * <li>cachable</li>
  * </ul>
  * <p/>
  * The following JSON Schema will yield a valid KerberosPrincipalDescriptor
@@ -81,6 +82,11 @@ import java.util.Map;
  *                          - format: config-type/property.name",
  *          "type": "string"
 *        },
+ *        "cachable": {
+ *          "description": "Indicates whether the generated keytab is allowed to be cached by the
+ *                          Ambari server (true) or not (false)",
+ *          "type": "boolean"
+ *        }
  *      }
  *   }
  * </pre>
@@ -143,6 +149,12 @@ public class KerberosKeytabDescriptor extends AbstractKerberosDescriptor {
 
 
   /**
+   * A boolean value indicating whether the generated keytab is allowed to be cached by the Ambari
+   * server or not.
+   */
+  private boolean cachable = true;
+
+  /**
    * Creates a new KerberosKeytabDescriptor
    * <p/>
    * See {@link org.apache.ambari.server.state.kerberos.KerberosKeytabDescriptor} for the JSON
@@ -174,6 +186,9 @@ public class KerberosKeytabDescriptor extends AbstractKerberosDescriptor {
       }
 
       setConfiguration(getStringValue(data, "configuration"));
+
+      // If the "cachable" value is anything but false, set it to true
+      setCachable(!"false".equalsIgnoreCase(getStringValue(data, "cachable")));
     }
   }
 
@@ -310,6 +325,24 @@ public class KerberosKeytabDescriptor extends AbstractKerberosDescriptor {
   }
 
   /**
+   * Indicates whether the generated keytab is allowed to be cached by the Ambari server or not
+   *
+   * @return true if allowed to be cached; false otherwise
+   */
+  public boolean isCachable() {
+    return cachable;
+  }
+
+  /**
+   * Sets whether the generated keytab is allowed to be cached by the Ambari server or not
+   *
+   * @param cachable true if allowed to be cached; false otherwise
+   */
+  public void setCachable(boolean cachable) {
+    this.cachable = cachable;
+  }
+
+  /**
    * Updates this KerberosKeytabDescriptor with data from another KerberosKeytabDescriptor
    * <p/>
    * Properties will be updated if the relevant updated values are not null.

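Since the descriptor treats any value other than the literal string "false" as cachable, the default-to-true behavior deserves a worked example. A sketch that approximates getStringValue() with a plain map lookup:

    import java.util.HashMap;
    import java.util.Map;

    public class CachableFlagSketch {

      static boolean isCachable(Map<String, Object> data) {
        Object value = (data == null) ? null : data.get("cachable");
        String text = (value == null) ? null : value.toString();
        // Anything but the literal string "false" leaves caching enabled.
        return !"false".equalsIgnoreCase(text);
      }

      public static void main(String[] args) {
        Map<String, Object> keytabData = new HashMap<String, Object>();
        System.out.println(isCachable(keytabData)); // true: absent means cachable
        keytabData.put("cachable", "false");
        System.out.println(isCachable(keytabData)); // false
      }
    }
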
http://git-wip-us.apache.org/repos/asf/ambari/blob/8b4ef2b6/ambari-server/src/main/python/ambari_server/serverConfiguration.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/python/ambari_server/serverConfiguration.py b/ambari-server/src/main/python/ambari_server/serverConfiguration.py
index 9dfda01..c5c7ff3 100644
--- a/ambari-server/src/main/python/ambari_server/serverConfiguration.py
+++ b/ambari-server/src/main/python/ambari_server/serverConfiguration.py
@@ -322,6 +322,8 @@ class ServerConfigDefaultsLinux(ServerConfigDefaults):
       ("/var/run/ambari-server/stack-recommendations/", "755", "{0}", False),
       ("/var/lib/ambari-server/data/tmp/", "644", "{0}", True),
       ("/var/lib/ambari-server/data/tmp/", "755", "{0}", False),
+      ("/var/lib/ambari-server/data/cache/", "600", "{0}", True),
+      ("/var/lib/ambari-server/data/cache/", "700", "{0}", False),
       # Also, /etc/ambari-server/conf/password.dat
       # is generated later at store_password_file
     ]

http://git-wip-us.apache.org/repos/asf/ambari/blob/8b4ef2b6/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/configuration/kerberos-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/configuration/kerberos-env.xml b/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/configuration/kerberos-env.xml
index 15a39d9..31833cb 100644
--- a/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/configuration/kerberos-env.xml
+++ b/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/configuration/kerberos-env.xml
@@ -61,8 +61,24 @@
     <value/>
   </property>
 
+  <property require-input="true">
+    <name>kdc_host</name>
+    <description>
+      The IP address or FQDN of the KDC host. Optionally, a port number may be included.
+    </description>
+    <value/>
+  </property>
+
+  <property>
+    <name>admin_server_host</name>
+    <description>
+      The IP address or FQDN of the Kerberos administrative host. Optionally, a port number may be included.
+    </description>
+    <value/>
+  </property>
+
 
-    <property require-input="true">
+  <property require-input="true">
     <name>create_attributes_template</name>
     <description>
       A Velocity template to use to generate a JSON-formatted document containing the set of

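With kdc_host and admin_server_host now sourced from kerberos-env, the lookup path on the server side becomes a straight map read. A sketch with assumed example values (the host names are placeholders):

    import java.util.HashMap;
    import java.util.Map;

    public class KerberosEnvSketch {
      public static void main(String[] args) {
        Map<String, String> kerberosEnv = new HashMap<String, String>();
        kerberosEnv.put("realm", "EXAMPLE.COM");
        kerberosEnv.put("kdc_host", "kdc.example.com:88");       // port is optional
        kerberosEnv.put("admin_server_host", "kdc.example.com"); // port is optional

        // Mirrors KERBEROS_ENV_ADMIN_SERVER_HOST handling in the operation handler.
        String adminServerHost = kerberosEnv.get("admin_server_host");
        System.out.println("kadmin target: "
            + ((adminServerHost != null) ? adminServerHost : "krb5.conf default"));
      }
    }
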
http://git-wip-us.apache.org/repos/asf/ambari/blob/8b4ef2b6/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/configuration/krb5-conf.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/configuration/krb5-conf.xml b/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/configuration/krb5-conf.xml
index 02d78b8..3a6207b 100644
--- a/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/configuration/krb5-conf.xml
+++ b/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/configuration/krb5-conf.xml
@@ -21,79 +21,6 @@
 -->
 
 <configuration>
-  <property>
-    <name>logging_default</name>
-    <description>
-      Default Kerberos library log location.
-    </description>
-    <value>FILE:/var/log/krb5libs.log</value>
-  </property>
-  <property>
-    <name>logging_kdc</name>
-    <description>
-      KDC log location.
-    </description>
-    <value>FILE:/var/log/krb5kdc.log</value>
-  </property>
-  <property>
-    <name>logging_admin_server</name>
-    <description>
-      Admin server log location.
-    </description>
-    <value>FILE:/var/log/kadmind.log</value>
-  </property>
-
-  <property>
-    <name>libdefaults_dns_lookup_realm</name>
-    <description>
-      If true, DNS TXT records will be used to determine the Kerberos realm of a host.
-    </description>
-    <value>false</value>
-  </property>
-  <property>
-    <name>libdefaults_dns_lookup_kdc</name>
-    <description>
-      If true, DNS SRV records will be used to locate the KDCs and other servers for the realm.
-    </description>
-    <value>false</value>
-  </property>
-  <property>
-    <name>libdefaults_ticket_lifetime</name>
-    <description>
-      Default lifetime of a ticket.
-    </description>
-    <value>24h</value>
-  </property>
-  <property>
-    <name>libdefaults_renew_lifetime</name>
-    <description>
-      Default renewable lifetime for initial tickets.
-    </description>
-    <value>7d</value>
-  </property>
-  <property>
-    <name>libdefaults_forwardable</name>
-    <description>
-      If true, initial tickets will be forwardable.
-    </description>
-    <value>true</value>
-  </property>
-  <property require-input="false">
-    <name>libdefaults_default_tgs_enctypes</name>
-    <description>
-      A space-delimited list of session key encryption types supported by the KDC or Active
-      Directory
-    </description>
-    <value/>
-  </property>
-  <property require-input="false">
-    <name>libdefaults_default_tkt_enctypes</name>
-    <description>
-      A space-delimited list of session key encryption types supported by the KDC or Active
-      Directory.
-    </description>
-    <value/>
-  </property>
   <property require-input="false">
     <name>domains</name>
     <description>
@@ -101,20 +28,6 @@
     </description>
     <value/>
   </property>
-  <property require-input="true">
-    <name>kdc_host</name>
-    <description>
-      The IP address or FQDN for the KDC host. Optionally a port number may be included.
-    </description>
-    <value/>
-  </property>
-  <property>
-    <name>admin_server_host</name>
-    <description>
-      The IP address or FQDN for the KDC Kerberos administrative host. Optionally a port number may be included.
-    </description>
-    <value/>
-  </property>
 
   <property>
     <name>manage_krb5_conf</name>
@@ -134,18 +47,14 @@
     <description>Customizable krb5.conf template (Jinja template engine)</description>
     <value>
 [libdefaults]
-  renew_lifetime = {{libdefaults_renew_lifetime}}
-  forwardable = {{libdefaults_forwardable}}
+  renew_lifetime = 7d
+  forwardable = true
   default_realm = {{realm|upper()}}
-  ticket_lifetime = {{libdefaults_ticket_lifetime}}
-  dns_lookup_realm = {{libdefaults_dns_lookup_realm}}
-  dns_lookup_kdc = {{libdefaults_dns_lookup_kdc}}
-  {% if libdefaults_default_tgs_enctypes %}
-  default_tgs_enctypes = {{libdefaults_default_tgs_enctypes}}
-  {% endif %}
-  {% if libdefaults_default_tkt_enctypes %}
-  default_tkt_enctypes = {{libdefaults_default_tkt_enctypes}}
-  {% endif %}
+  ticket_lifetime = 24h
+  dns_lookup_realm = false
+  dns_lookup_kdc = false
+  #default_tgs_enctypes = {{encryption_types}}
+  #default_tkt_enctypes = {{encryption_types}}
 
 {% if domains %}
 [domain_realm]
@@ -155,12 +64,9 @@
 {% endif %}
 
 [logging]
-  default = {{logging_default}}
-{#
-# The following options are unused unless a managed KDC is installed
-  admin_server = {{logging_admin_server}}
-  kdc = {{logging_admin_kdc}}
-#}
+  default = FILE:/var/log/krb5kdc.log
+  admin_server = FILE:/var/log/kadmind.log
+  kdc = FILE:/var/log/krb5kdc.log
 
 [realms]
   {{realm}} = {

http://git-wip-us.apache.org/repos/asf/ambari/blob/8b4ef2b6/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/package/scripts/params.py b/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/package/scripts/params.py
index 3ccbc3e..18255bd 100644
--- a/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/package/scripts/params.py
@@ -99,17 +99,6 @@ if config is not None:
   # ################################################################################################
   # Get krb5.conf template data
   # ################################################################################################
-  logging_default = 'FILE:/var/log/krb5libs.log'
-  logging_kdc = 'FILE:/var/log/krb5kdc.log'
-  logging_admin_server = 'FILE:/var/log/kadmind.log'
-  libdefaults_dns_lookup_realm = 'false'
-  libdefaults_dns_lookup_kdc = 'false'
-  libdefaults_ticket_lifetime = '24h'
-  libdefaults_renew_lifetime = '7d'
-  libdefaults_forwardable = 'true'
-  libdefaults_default_tgs_enctypes = None
-  libdefaults_default_tkt_enctypes = None
-
   realm = 'EXAMPLE.COM'
   domains = ''
   kdc_host = 'localhost'
@@ -132,33 +121,12 @@ if config is not None:
   if kerberos_env is not None:
     encryption_types = get_property_value(kerberos_env, "encryption_types", None, True, None)
     realm = get_property_value(kerberos_env, "realm", None, True, None)
+    kdc_host = get_property_value(kerberos_env, 'kdc_host', kdc_host)
+    admin_server_host = get_property_value(kerberos_env, 'admin_server_host', admin_server_host)
 
   if krb5_conf_data is not None:
-    logging_default = get_property_value(krb5_conf_data, 'logging_default', logging_default)
-    logging_kdc = get_property_value(krb5_conf_data, 'logging_kdc', logging_kdc)
-    logging_admin_server = get_property_value(krb5_conf_data, 'logging_admin_server',
-                                              logging_admin_server)
-    libdefaults_dns_lookup_realm = get_property_value(krb5_conf_data,
-                                                      'libdefaults_dns_lookup_realm',
-                                                      libdefaults_dns_lookup_realm)
-    libdefaults_dns_lookup_kdc = get_property_value(krb5_conf_data, 'libdefaults_dns_lookup_kdc',
-                                                    libdefaults_dns_lookup_kdc)
-    libdefaults_ticket_lifetime = get_property_value(krb5_conf_data, 'libdefaults_ticket_lifetime',
-                                                     libdefaults_ticket_lifetime)
-    libdefaults_renew_lifetime = get_property_value(krb5_conf_data, 'libdefaults_renew_lifetime',
-                                                    libdefaults_renew_lifetime)
-    libdefaults_forwardable = get_property_value(krb5_conf_data, 'libdefaults_forwardable',
-                                                 libdefaults_forwardable)
-    libdefaults_default_tgs_enctypes = get_property_value(krb5_conf_data,
-                                                          'libdefaults_default_tgs_enctypes',
-                                                          libdefaults_default_tgs_enctypes)
-    libdefaults_default_tkt_enctypes = get_property_value(krb5_conf_data,
-                                                          'libdefaults_default_tkt_enctypes',
-                                                          libdefaults_default_tkt_enctypes)
     realm = get_property_value(krb5_conf_data, 'realm', realm)
     domains = get_property_value(krb5_conf_data, 'domains', domains)
-    kdc_host = get_property_value(krb5_conf_data, 'kdc_host', kdc_host)
-    admin_server_host = get_property_value(krb5_conf_data, 'admin_server_host', admin_server_host)
 
     admin_principal = get_property_value(krb5_conf_data, 'admin_principal', admin_principal, True,
                                          None)

http://git-wip-us.apache.org/repos/asf/ambari/blob/8b4ef2b6/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/package/templates/krb5_conf.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/package/templates/krb5_conf.j2 b/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/package/templates/krb5_conf.j2
index 0d915ba..cc6f63a 100644
--- a/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/package/templates/krb5_conf.j2
+++ b/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/package/templates/krb5_conf.j2
@@ -16,18 +16,14 @@
 # limitations under the License.
 #}
 [libdefaults]
-  renew_lifetime = {{libdefaults_renew_lifetime}}
-  forwardable = {{libdefaults_forwardable}}
+  renew_lifetime = 7d
+  forwardable = true
   default_realm = {{realm|upper()}}
-  ticket_lifetime = {{libdefaults_ticket_lifetime}}
-  dns_lookup_realm = {{libdefaults_dns_lookup_realm}}
-  dns_lookup_kdc = {{libdefaults_dns_lookup_kdc}}
-  {% if libdefaults_default_tgs_enctypes %}
-  default_tgs_enctypes = {{libdefaults_default_tgs_enctypes}}
-  {% endif %}
-  {% if libdefaults_default_tkt_enctypes %}
-  default_tkt_enctypes = {{libdefaults_default_tkt_enctypes}}
-  {% endif %}
+  ticket_lifetime = 24h
+  dns_lookup_realm = false
+  dns_lookup_kdc = false
+  #default_tgs_enctypes = {{encryption_types}}
+  #default_tkt_enctypes = {{encryption_types}}
 
 {% if domains %}
 [domain_realm]
@@ -37,12 +33,9 @@
 {% endif %}
 
 [logging]
-  default = {{logging_default}}
-{#
-# The following options are unused unless a managed KDC is installed
-  admin_server = {{logging_admin_server}}
-  kdc = {{logging_admin_kdc}}
-#}
+  default = FILE:/var/log/krb5kdc.log
+  admin_server = FILE:/var/log/kadmind.log
+  kdc = FILE:/var/log/krb5kdc.log
 
 [realms]
   {{realm}} = {

http://git-wip-us.apache.org/repos/asf/ambari/blob/8b4ef2b6/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/KERBEROS/configuration/krb5-conf.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/KERBEROS/configuration/krb5-conf.xml b/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/KERBEROS/configuration/krb5-conf.xml
index 43050bd..8622e13 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/KERBEROS/configuration/krb5-conf.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/KERBEROS/configuration/krb5-conf.xml
@@ -21,55 +21,6 @@
 -->
 
 <configuration>
-  <property>
-    <name>logging_default</name>
-    <value>FILE:/var/log/krb5libs.log</value>
-  </property>
-  <property>
-    <name>logging_kdc</name>
-    <value>FILE:/var/log/krb5kdc.log</value>
-  </property>
-  <property>
-    <name>logging_admin_server</name>
-    <value>FILE:/var/log/kadmind.log</value>
-  </property>
-
-  <property>
-    <name>libdefaults_dns_lookup_realm</name>
-    <value>false</value>
-  </property>
-  <property>
-    <name>libdefaults_dns_lookup_kdc</name>
-    <value>false</value>
-  </property>
-  <property>
-    <name>libdefaults_ticket_lifetime</name>
-    <value>24h</value>
-  </property>
-  <property>
-    <name>libdefaults_renew_lifetime</name>
-    <value>7d</value>
-  </property>
-  <property>
-    <name>libdefaults_forwardable</name>
-    <value>true</value>
-  </property>
-  <property require-input="false">
-    <name>libdefaults_default_tgs_enctypes</name>
-    <description>
-      A space-delimited list of session key encryption types supported by the KDC or Active
-      Directory
-    </description>
-    <value/>
-  </property>
-  <property require-input="false">
-    <name>libdefaults_default_tkt_enctypes</name>
-    <description>
-      A space-delimited list of session key encryption types supported by the KDC or Active
-      Directory
-    </description>
-    <value/>
-  </property>
   <property require-input="false">
     <name>domains</name>
     <description>
@@ -77,22 +28,6 @@
     </description>
     <value/>
   </property>
-  <property require-input="true">
-    <name>kdc_host</name>
-    <description>
-      The IP address or FQDN of the KDC or Active Directory server, optionally a port number may be
-      provided
-    </description>
-    <value/>
-  </property>
-  <property>
-    <name>admin_server_host</name>
-    <description>
-      The IP address or FQDN of the administrative Kerberos server, optionally a port number may be
-      provided
-    </description>
-    <value/>
-  </property>
   <property>
     <name>test_principal</name>
     <description>
@@ -138,18 +73,14 @@
     <description>The jinja template for the krb5.conf file</description>
     <value>
 [libdefaults]
-  renew_lifetime = {{libdefaults_renew_lifetime}}
-  forwardable = {{libdefaults_forwardable}}
+  renew_lifetime = 7d
+  forwardable = true
   default_realm = {{realm|upper()}}
-  ticket_lifetime = {{libdefaults_ticket_lifetime}}
-  dns_lookup_realm = {{libdefaults_dns_lookup_realm}}
-  dns_lookup_kdc = {{libdefaults_dns_lookup_kdc}}
-  {% if libdefaults_default_tgs_enctypes %}
-  default_tgs_enctypes = {{libdefaults_default_tgs_enctypes}}
-  {% endif %}
-  {% if libdefaults_default_tkt_enctypes %}
-  default_tkt_enctypes = {{libdefaults_default_tkt_enctypes}}
-  {% endif %}
+  ticket_lifetime = 24h
+  dns_lookup_realm = false
+  dns_lookup_kdc = false
+  #default_tgs_enctypes = {{encryption_types}}
+  #default_tkt_enctypes = {{encryption_types}}
 
 {% if domains %}
 [domain_realm]
@@ -159,12 +90,9 @@
 {% endif %}
 
 [logging]
-  default = {{logging_default}}
-{#
-# The following options are unused unless a managed KDC is installed
-  admin_server = {{logging_admin_server}}
-  kdc = {{logging_admin_kdc}}
-#}
+  default = FILE:/var/log/krb5kdc.log
+  admin_server = FILE:/var/log/kadmind.log
+  kdc = FILE:/var/log/krb5kdc.log
 
 [realms]
   {{realm}} = {

http://git-wip-us.apache.org/repos/asf/ambari/blob/8b4ef2b6/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/KERBEROS/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/KERBEROS/package/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/KERBEROS/package/scripts/params.py
index 31e4134..1c2061a 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/KERBEROS/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/KERBEROS/package/scripts/params.py
@@ -88,20 +88,6 @@ if config is not None:
   # ################################################################################################
   # Get krb5.conf template data
   # ################################################################################################
-  logging_default = 'FILE:/var/log/krb5libs.log'
-  logging_kdc = 'FILE:/var/log/krb5kdc.log'
-  logging_admin_server = 'FILE:/var/log/kadmind.log'
-  libdefaults_dns_lookup_realm = 'false'
-  libdefaults_dns_lookup_kdc = 'false'
-  libdefaults_ticket_lifetime = '24h'
-  libdefaults_renew_lifetime = '7d'
-  libdefaults_forwardable = 'true'
-  libdefaults_default_tgs_enctypes = 'aes256-cts-hmac-sha1-96 aes128-cts-hmac-sha1-96 des3-cbc-sha1 ' \
-                                     'arcfour-hmac-md5 camellia256-cts-cmac camellia128-cts-cmac ' \
-                                     'des-cbc-crc des-cbc-md5 des-cbc-md4'
-  libdefaults_default_tkt_enctypes = 'aes256-cts-hmac-sha1-96 aes128-cts-hmac-sha1-96 des3-cbc-sha1 ' \
-                                     'arcfour-hmac-md5 camellia256-cts-cmac camellia128-cts-cmac ' \
-                                     'des-cbc-crc des-cbc-md5 des-cbc-md4'
   realm = 'EXAMPLE.COM'
   domains = ''
   kdc_host = 'localhost'
@@ -127,33 +113,12 @@ if config is not None:
   if kerberos_env is not None:
     encryption_types = get_property_value(kerberos_env, "encryption_types", None)
     realm = get_property_value(kerberos_env, "realm", None)
+    kdc_host = get_property_value(kerberos_env, 'kdc_host', kdc_host)
+    admin_server_host = get_property_value(kerberos_env, 'admin_server_host', admin_server_host)
 
   if krb5_conf_data is not None:
-    logging_default = get_property_value(krb5_conf_data, 'logging_default', logging_default)
-    logging_kdc = get_property_value(krb5_conf_data, 'logging_kdc', logging_kdc)
-    logging_admin_server = get_property_value(krb5_conf_data, 'logging_admin_server',
-                                              logging_admin_server)
-    libdefaults_dns_lookup_realm = get_property_value(krb5_conf_data,
-                                                      'libdefaults_dns_lookup_realm',
-                                                      libdefaults_dns_lookup_realm)
-    libdefaults_dns_lookup_kdc = get_property_value(krb5_conf_data, 'libdefaults_dns_lookup_kdc',
-                                                    libdefaults_dns_lookup_kdc)
-    libdefaults_ticket_lifetime = get_property_value(krb5_conf_data, 'libdefaults_ticket_lifetime',
-                                                     libdefaults_ticket_lifetime)
-    libdefaults_renew_lifetime = get_property_value(krb5_conf_data, 'libdefaults_renew_lifetime',
-                                                    libdefaults_renew_lifetime)
-    libdefaults_forwardable = get_property_value(krb5_conf_data, 'libdefaults_forwardable',
-                                                 libdefaults_forwardable)
-    libdefaults_default_tgs_enctypes = get_property_value(krb5_conf_data,
-                                                          'libdefaults_default_tgs_enctypes',
-                                                          encryption_types)
-    libdefaults_default_tkt_enctypes = get_property_value(krb5_conf_data,
-                                                          'libdefaults_default_tkt_enctypes',
-                                                          encryption_types)
     realm = get_property_value(krb5_conf_data, 'realm', realm)
     domains = get_property_value(krb5_conf_data, 'domains', domains)
-    kdc_host = get_property_value(krb5_conf_data, 'kdc_host', kdc_host)
-    admin_server_host = get_property_value(krb5_conf_data, 'admin_server_host', admin_server_host)
 
     admin_principal = get_property_value(krb5_conf_data, 'admin_principal', admin_principal)
     admin_password = get_property_value(krb5_conf_data, 'admin_password', admin_password)

http://git-wip-us.apache.org/repos/asf/ambari/blob/8b4ef2b6/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/KERBEROS/package/templates/krb5_conf.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/KERBEROS/package/templates/krb5_conf.j2 b/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/KERBEROS/package/templates/krb5_conf.j2
index 0d915ba..cc6f63a 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/KERBEROS/package/templates/krb5_conf.j2
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/KERBEROS/package/templates/krb5_conf.j2
@@ -16,18 +16,14 @@
 # limitations under the License.
 #}
 [libdefaults]
-  renew_lifetime = {{libdefaults_renew_lifetime}}
-  forwardable = {{libdefaults_forwardable}}
+  renew_lifetime = 7d
+  forwardable = true
   default_realm = {{realm|upper()}}
-  ticket_lifetime = {{libdefaults_ticket_lifetime}}
-  dns_lookup_realm = {{libdefaults_dns_lookup_realm}}
-  dns_lookup_kdc = {{libdefaults_dns_lookup_kdc}}
-  {% if libdefaults_default_tgs_enctypes %}
-  default_tgs_enctypes = {{libdefaults_default_tgs_enctypes}}
-  {% endif %}
-  {% if libdefaults_default_tkt_enctypes %}
-  default_tkt_enctypes = {{libdefaults_default_tkt_enctypes}}
-  {% endif %}
+  ticket_lifetime = 24h
+  dns_lookup_realm = false
+  dns_lookup_kdc = false
+  #default_tgs_enctypes = {{encryption_types}}
+  #default_tkt_enctypes = {{encryption_types}}
 
 {% if domains %}
 [domain_realm]
@@ -37,12 +33,9 @@
 {% endif %}
 
 [logging]
-  default = {{logging_default}}
-{#
-# The following options are unused unless a managed KDC is installed
-  admin_server = {{logging_admin_server}}
-  kdc = {{logging_admin_kdc}}
-#}
+  default = FILE:/var/log/krb5kdc.log
+  admin_server = FILE:/var/log/kadmind.log
+  kdc = FILE:/var/log/krb5kdc.log
 
 [realms]
   {{realm}} = {

http://git-wip-us.apache.org/repos/asf/ambari/blob/8b4ef2b6/ambari-server/src/test/java/org/apache/ambari/server/agent/TestHeartbeatHandler.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/agent/TestHeartbeatHandler.java b/ambari-server/src/test/java/org/apache/ambari/server/agent/TestHeartbeatHandler.java
index 03d3a91..5541523 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/agent/TestHeartbeatHandler.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/agent/TestHeartbeatHandler.java
@@ -2619,7 +2619,7 @@ public class TestHeartbeatHandler {
     kerberosActionDataFileBuilder.addRecord("c6403.ambari.apache.org", "HDFS", "DATANODE",
         "dn/_HOST@_REALM", "service", "hdfs-site/dfs.namenode.kerberos.principal",
         "/etc/security/keytabs/dn.service.keytab",
-        "hdfs", "r", "hadoop", "", "hdfs-site/dfs.namenode.keytab.file");
+        "hdfs", "r", "hadoop", "", "hdfs-site/dfs.namenode.keytab.file", "false");
 
     kerberosActionDataFileBuilder.close();