Posted to commits@ambari.apache.org by ma...@apache.org on 2016/03/30 19:18:41 UTC

[1/2] ambari git commit: AMBARI-15561. Automate creation of Ambari Server proxy users (secure/non-secure clusters), principal and keytab, setup of JAAS (secure clusters) (magyari_sandor)

Repository: ambari
Updated Branches:
  refs/heads/trunk deb45e81a -> 6c008c83d


http://git-wip-us.apache.org/repos/asf/ambari/blob/6c008c83/ambari-server/src/test/python/stacks/2.0.6/common/test_stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/common/test_stack_advisor.py b/ambari-server/src/test/python/stacks/2.0.6/common/test_stack_advisor.py
index 9a61f99..681824a 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/common/test_stack_advisor.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/common/test_stack_advisor.py
@@ -510,7 +510,7 @@ class TestHDP206StackAdvisor(TestCase):
 
   def test_recommendYARNConfigurations(self):
     configurations = {}
-    services = {"configurations": configurations}
+    services = {"configurations": configurations, "services": []}
     clusterData = {
       "containers" : 5,
       "ramPerContainer": 256
@@ -1054,7 +1054,8 @@ class TestHDP206StackAdvisor(TestCase):
             "hostnames": ["c6401.ambari.apache.org", "c6402.ambari.apache.org"]
           }, }]
         }],
-      "configurations": configurations
+      "configurations": configurations,
+      "ambari-server-properties": {"ambari-server.user":"ambari_user"}
     }
 
     clusterData = {
@@ -1065,7 +1066,9 @@ class TestHDP206StackAdvisor(TestCase):
                      {'oozie_user': 'oozie'}},
                 'core-site':
                   {'properties':
-                     {'hadoop.proxyuser.oozie.groups': '*',
+                     {'hadoop.proxyuser.ambari_user.groups': '*',
+                      'hadoop.proxyuser.ambari_user.hosts': '*',
+                      'hadoop.proxyuser.oozie.groups': '*',
                       'hadoop.proxyuser.hive.groups': '*',
                       'hadoop.proxyuser.webhcat.hosts': 'c6401.ambari.apache.org,c6402.ambari.apache.org',
                       'hadoop.proxyuser.falcon.hosts': '*',
@@ -1110,7 +1113,9 @@ class TestHDP206StackAdvisor(TestCase):
                   {'properties':
                      {'oozie_user': 'oozie'}},
                 'core-site': {'properties':
-                                {'hadoop.proxyuser.oozie.groups': '*',
+                                {'hadoop.proxyuser.ambari_user.groups': '*',
+                                 'hadoop.proxyuser.ambari_user.hosts': '*',
+                                 'hadoop.proxyuser.oozie.groups': '*',
                                  'hadoop.proxyuser.hive.groups': '*',
                                  'hadoop.proxyuser.hdfs1.groups': '*',
                                  'hadoop.proxyuser.hdfs1.hosts': '*',

http://git-wip-us.apache.org/repos/asf/ambari/blob/6c008c83/ambari-server/src/test/python/stacks/2.1/common/test_stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.1/common/test_stack_advisor.py b/ambari-server/src/test/python/stacks/2.1/common/test_stack_advisor.py
index 55cf39d..ac4b5f5 100644
--- a/ambari-server/src/test/python/stacks/2.1/common/test_stack_advisor.py
+++ b/ambari-server/src/test/python/stacks/2.1/common/test_stack_advisor.py
@@ -118,7 +118,7 @@ class TestHDP21StackAdvisor(TestCase):
       }
     }
 
-    self.stackAdvisor.recommendHiveConfigurations(configurations, clusterData, {"configurations": {}}, None)
+    self.stackAdvisor.recommendHiveConfigurations(configurations, clusterData, {"configurations": {}, "services": []}, None)
     self.maxDiff = None
     self.assertEquals(configurations, expected)
 
@@ -143,7 +143,7 @@ class TestHDP21StackAdvisor(TestCase):
       }
     }
 
-    self.stackAdvisor.recommendHiveConfigurations(configurations, clusterData, {"configurations":{}}, None)
+    self.stackAdvisor.recommendHiveConfigurations(configurations, clusterData, {"configurations":{}, "services": []}, None)
     self.assertEquals(configurations, expected)
 
   def test_createComponentLayoutRecommendations_mastersIn10nodes(self):
@@ -276,7 +276,7 @@ class TestHDP21StackAdvisor(TestCase):
       }
     }
 
-    self.stackAdvisor.recommendHiveConfigurations(configurations, clusterData, {"configurations":{}}, None)
+    self.stackAdvisor.recommendHiveConfigurations(configurations, clusterData, {"configurations":{}, "services": []}, None)
     self.assertEquals(configurations, expected)
 
   def test_recommendHbaseConfigurations(self):
@@ -361,7 +361,8 @@ class TestHDP21StackAdvisor(TestCase):
             "service_name": "HDFS"
           }, "components": []
         }],
-      "configurations": configurations
+      "configurations": configurations,
+      "ambari-server-properties": {"ambari-server.user":"ambari_user"}
     }
 
     clusterData = {
@@ -380,6 +381,8 @@ class TestHDP21StackAdvisor(TestCase):
         "properties": {
           "hadoop.proxyuser.hdfs.hosts": "*",
           "hadoop.proxyuser.hdfs.groups": "*",
+          "hadoop.proxyuser.ambari_user.hosts": "*",
+          "hadoop.proxyuser.ambari_user.groups": "*"
         }
       },
       "hdfs-site": {

http://git-wip-us.apache.org/repos/asf/ambari/blob/6c008c83/ambari-server/src/test/python/stacks/2.2/common/test_stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.2/common/test_stack_advisor.py b/ambari-server/src/test/python/stacks/2.2/common/test_stack_advisor.py
index 06fb9f3..c48b2c7 100644
--- a/ambari-server/src/test/python/stacks/2.2/common/test_stack_advisor.py
+++ b/ambari-server/src/test/python/stacks/2.2/common/test_stack_advisor.py
@@ -2814,6 +2814,8 @@ class TestHDP22StackAdvisor(TestCase):
         "properties": {
           "hadoop.proxyuser.hdfs.hosts": "*",
           "hadoop.proxyuser.hdfs.groups": "*",
+          "hadoop.proxyuser.ambari_user.hosts": "*",
+          "hadoop.proxyuser.ambari_user.groups": "*"
         }
       }
     }
@@ -2907,7 +2909,8 @@ class TestHDP22StackAdvisor(TestCase):
                         },
                       ],
                     }],
-                "configurations": configurations
+                "configurations": configurations,
+                "ambari-server-properties": {"ambari-server.user":"ambari_user"}
                 }
     hosts = {
       "items" : [

http://git-wip-us.apache.org/repos/asf/ambari/blob/6c008c83/ambari-server/src/test/python/stacks/2.3/common/test_stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.3/common/test_stack_advisor.py b/ambari-server/src/test/python/stacks/2.3/common/test_stack_advisor.py
index d415b6f..39f0c06 100644
--- a/ambari-server/src/test/python/stacks/2.3/common/test_stack_advisor.py
+++ b/ambari-server/src/test/python/stacks/2.3/common/test_stack_advisor.py
@@ -819,7 +819,8 @@ class TestHDP23StackAdvisor(TestCase):
       "Versions": {
         "stack_version": "2.3"
       },
-      "configurations": configurations
+      "configurations": configurations,
+      "ambari-server-properties": {"ambari-server.user":"ambari_user"}
     }
 
     # Test with Ranger HDFS plugin disabled


[2/2] ambari git commit: AMBARI-15561. Automate creation of Ambari Server proxy users (secure/non-secure clusters), principal and keytab, setup of JAAS (secure clusters) (magyari_sandor)

Posted by ma...@apache.org.
AMBARI-15561. Automate creation of Ambari Server proxy users (secure/non-secure clusters), principal and keytab, setup of JAAS (secure clusters) (magyari_sandor)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/6c008c83
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/6c008c83
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/6c008c83

Branch: refs/heads/trunk
Commit: 6c008c83d1fa069d9a4fe30c7b2928dbaad86242
Parents: deb45e8
Author: Sandor Magyari <sm...@hortonworks.com>
Authored: Fri Mar 18 13:32:59 2016 +0100
Committer: Sandor Magyari <sm...@hortonworks.com>
Committed: Wed Mar 30 19:14:30 2016 +0200

----------------------------------------------------------------------
 .../server/configuration/Configuration.java     |   2 +
 .../server/controller/KerberosHelper.java       |  22 +-
 .../server/controller/KerberosHelperImpl.java   | 332 +++++++++++++++----
 .../AbstractPrepareKerberosServerAction.java    |  93 ++++--
 .../ConfigureAmbariIndetityServerAction.java    | 261 +++++++++++++++
 .../kerberos/CreateKeytabFilesServerAction.java |   7 +-
 .../kerberos/DestroyPrincipalsServerAction.java |  12 +
 .../kerberos/KerberosServerAction.java          |   6 +
 .../PrepareDisableKerberosServerAction.java     |  19 +-
 .../PrepareEnableKerberosServerAction.java      |   7 +-
 .../PrepareKerberosIdentitiesServerAction.java  |  29 +-
 .../1.10.3-10/configuration/kerberos-env.xml    |  14 +
 .../resources/stacks/HDP/2.0.6/kerberos.json    |  16 +
 .../stacks/HDP/2.0.6/services/stack_advisor.py  |  46 +++
 .../stacks/HDP/2.1/services/stack_advisor.py    |  13 +
 .../server/controller/KerberosHelperTest.java   |  17 +-
 .../stacks/2.0.6/common/test_stack_advisor.py   |  13 +-
 .../stacks/2.1/common/test_stack_advisor.py     |  11 +-
 .../stacks/2.2/common/test_stack_advisor.py     |   5 +-
 .../stacks/2.3/common/test_stack_advisor.py     |   3 +-
 20 files changed, 809 insertions(+), 119 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/6c008c83/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java b/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java
index 1d30f1c..0302b6e 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java
@@ -294,6 +294,8 @@ public class Configuration {
   public static final String OPERATIONS_RETRY_ATTEMPTS_DEFAULT = "0";
   public static final int RETRY_ATTEMPTS_LIMIT = 10;
 
+  public static final String AMBARI_SERVER_USER = "ambari-server.user";
+
   public static final String SERVER_JDBC_RCA_USER_NAME_KEY = "server.jdbc.rca.user.name";
   public static final String SERVER_JDBC_RCA_USER_PASSWD_KEY = "server.jdbc.rca.user.passwd";
   public static final String SERVER_JDBC_RCA_DRIVER_KEY = "server.jdbc.rca.driver";

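The new AMBARI_SERVER_USER key ("ambari-server.user") names the local account the Ambari Server process runs as. The stack advisor test changes above expect exactly that account to surface in core-site as hadoop.proxyuser.<user>.hosts and hadoop.proxyuser.<user>.groups, both set to "*". A minimal, self-contained sketch of that mapping follows; the class and method names are illustrative and not part of the patch, while the "*" values mirror the test expectations.

import java.util.HashMap;
import java.util.Map;

/** Illustrative only: derives the core-site proxyuser entries the updated tests expect. */
public class AmbariProxyUserSketch {

  static Map<String, String> proxyUserProperties(String ambariServerUser) {
    Map<String, String> coreSite = new HashMap<>();
    // hosts/groups wildcards match the expected values in the 2.0.6/2.1/2.2 stack advisor tests
    coreSite.put("hadoop.proxyuser." + ambariServerUser + ".hosts", "*");
    coreSite.put("hadoop.proxyuser." + ambariServerUser + ".groups", "*");
    return coreSite;
  }

  public static void main(String[] args) {
    // "ambari_user" is the value supplied via "ambari-server-properties" in the tests
    System.out.println(proxyUserProperties("ambari_user"));
  }
}
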
http://git-wip-us.apache.org/repos/asf/ambari/blob/6c008c83/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelper.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelper.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelper.java
index 39f55cd..5ffc8a3 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelper.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelper.java
@@ -66,6 +66,14 @@ public interface KerberosHelper {
    */
   String KDC_ADMINISTRATOR_CREDENTIAL_ALIAS = "kdc.admin.credential";
 
+  String AMBARI_SERVER_HOST_NAME = "ambari_server";
+
+  String AMBARI_IDENTITY_NAME = "ambari-server";
+
+  String CREATE_AMBARI_PRINCIPAL = "create_ambari_principal";
+
+  String MANAGE_IDENTITIES = "manage_identities";
+
   /**
    * Toggles Kerberos security to enable it or remove it depending on the state of the cluster.
    * <p/>
@@ -250,13 +258,21 @@ public interface KerberosHelper {
 
   /**
    * Invokes the Stack Advisor to help determine relevant configuration changes when enabling or
-   * disabling Kerberos
+   * disabling Kerberos. If kerberosEnabled = true, recommended properties are inserted into kerberosConfigurations
+   * and properties to remove into the propertiesToRemove map. If kerberosEnabled = false, recommended properties
+   * are inserted into propertiesToInsert and properties to remove into kerberosConfigurations. This is because in
+   * the first case the properties in kerberosConfigurations are going to be set, while in the second case they are
+   * going to be removed from the cluster config.
    *
    * @param cluster                a cluster
    * @param services               a set of services that are being configured to enabled or disable Kerberos
    * @param existingConfigurations the cluster's existing configurations
-   * @param kerberosConfigurations the configuration updates to make (must not be mutable)
+   * @param kerberosConfigurations the configuration updates to make
    * @param propertiesToIgnore     the configuration properties that should be ignored when applying recommendations
+   * @param propertiesToInsert     the configuration properties that must be inserted into the cluster config; populated
+   *                               only if this map is provided (not null) and kerberosEnabled = false
+   * @param propertiesToRemove     the configuration properties that must be removed from the cluster config; populated
+   *                               only if this map is provided (not null) and kerberosEnabled = true
    * @param kerberosEnabled        true if kerberos is (to be) enabled; otherwise false
    * @return the configuration updates
    * @throws AmbariException
@@ -265,6 +281,8 @@ public interface KerberosHelper {
                                                             Map<String, Map<String, String>> existingConfigurations,
                                                             Map<String, Map<String, String>> kerberosConfigurations,
                                                             Map<String, Set<String>> propertiesToIgnore,
+                                                            Map<String, Map<String, String>> propertiesToInsert,
+                                                            Map<String, Set<String>> propertiesToRemove,
                                                             boolean kerberosEnabled)
       throws AmbariException;
 

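The Javadoc above describes how the two new out-parameters behave: when Kerberos is being enabled, recommended additions are merged into kerberosConfigurations and deletions into propertiesToRemove; when it is being disabled, additions go to propertiesToInsert instead. A minimal calling sketch under that contract is shown below; the wrapper class and method are hypothetical, while the applyStackAdvisorUpdates signature and the Ambari types are taken from the interface above.

import java.util.HashMap;
import java.util.Map;
import java.util.Set;

import org.apache.ambari.server.AmbariException;
import org.apache.ambari.server.controller.KerberosHelper;
import org.apache.ambari.server.state.Cluster;

/** Illustrative caller of the extended applyStackAdvisorUpdates signature. */
public class StackAdvisorUpdateSketch {

  static void collectUpdatesOnDisable(KerberosHelper kerberosHelper, Cluster cluster,
                                      Set<String> services,
                                      Map<String, Map<String, String>> existingConfigurations)
      throws AmbariException {
    Map<String, Map<String, String>> kerberosConfigurations = new HashMap<>();
    // Out-parameters introduced by this patch; the helper fills them, the caller applies them.
    Map<String, Map<String, String>> propertiesToInsert = new HashMap<>();
    Map<String, Set<String>> propertiesToRemove = new HashMap<>();

    kerberosHelper.applyStackAdvisorUpdates(cluster, services, existingConfigurations,
        kerberosConfigurations, /* propertiesToIgnore */ null,
        propertiesToInsert, propertiesToRemove, /* kerberosEnabled */ false);

    // kerberosEnabled = false: additions recommended by the stack advisor are now in
    // propertiesToInsert, and kerberosConfigurations holds the properties to be removed.
  }
}
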
http://git-wip-us.apache.org/repos/asf/ambari/blob/6c008c83/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelperImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelperImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelperImpl.java
index f7326a0..350ad6d 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelperImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelperImpl.java
@@ -48,6 +48,7 @@ import org.apache.ambari.server.controller.spi.ResourceProvider;
 import org.apache.ambari.server.controller.spi.SystemException;
 import org.apache.ambari.server.controller.spi.UnsupportedPropertyException;
 import org.apache.ambari.server.controller.utilities.ClusterControllerHelper;
+import org.apache.ambari.server.controller.utilities.KerberosChecker;
 import org.apache.ambari.server.controller.utilities.PredicateBuilder;
 import org.apache.ambari.server.metadata.RoleCommandOrder;
 import org.apache.ambari.server.orm.dao.KerberosPrincipalDAO;
@@ -58,6 +59,7 @@ import org.apache.ambari.server.security.credential.PrincipalKeyCredential;
 import org.apache.ambari.server.security.encryption.CredentialStoreService;
 import org.apache.ambari.server.serveraction.ServerAction;
 import org.apache.ambari.server.serveraction.kerberos.CleanupServerAction;
+import org.apache.ambari.server.serveraction.kerberos.ConfigureAmbariIndetityServerAction;
 import org.apache.ambari.server.serveraction.kerberos.CreateKeytabFilesServerAction;
 import org.apache.ambari.server.serveraction.kerberos.CreatePrincipalsServerAction;
 import org.apache.ambari.server.serveraction.kerberos.DestroyPrincipalsServerAction;
@@ -94,6 +96,7 @@ import org.apache.ambari.server.state.ServiceComponent;
 import org.apache.ambari.server.state.ServiceComponentHost;
 import org.apache.ambari.server.state.StackId;
 import org.apache.ambari.server.state.State;
+import org.apache.ambari.server.state.ValueAttributesInfo;
 import org.apache.ambari.server.state.kerberos.KerberosComponentDescriptor;
 import org.apache.ambari.server.state.kerberos.KerberosConfigurationDescriptor;
 import org.apache.ambari.server.state.kerberos.KerberosDescriptor;
@@ -251,9 +254,9 @@ public class KerberosHelperImpl implements KerberosHelper {
               CreatePrincipalsAndKeytabsHandler handler = null;
 
               if ("true".equalsIgnoreCase(value) || "all".equalsIgnoreCase(value)) {
-                handler = new CreatePrincipalsAndKeytabsHandler(true, true);
+                handler = new CreatePrincipalsAndKeytabsHandler(true, true, true);
               } else if ("missing".equalsIgnoreCase(value)) {
-                handler = new CreatePrincipalsAndKeytabsHandler(false, true);
+                handler = new CreatePrincipalsAndKeytabsHandler(false, true, true);
               }
 
               if (handler != null) {
@@ -282,7 +285,8 @@ public class KerberosHelperImpl implements KerberosHelper {
                                                 RequestStageContainer requestStageContainer, Boolean manageIdentities)
       throws AmbariException, KerberosOperationException {
     return handle(cluster, getKerberosDetails(cluster, manageIdentities), serviceComponentFilter, hostFilter, identityFilter,
-        hostsToForceKerberosOperations, requestStageContainer, new CreatePrincipalsAndKeytabsHandler(false, false));
+        hostsToForceKerberosOperations, requestStageContainer, new CreatePrincipalsAndKeytabsHandler(false, false,
+        false));
   }
 
   @Override
@@ -402,17 +406,45 @@ public class KerberosHelperImpl implements KerberosHelper {
       }
     }
 
+
+    if (kerberosDetails.createAmbariPrincipal()) {
+      KerberosIdentityDescriptor ambariServerIdentityDescriptor = kerberosDescriptor.getIdentity(KerberosHelper.AMBARI_IDENTITY_NAME);
+      Map<String, Map<String, String>> map = new HashMap<String, Map<String, String>>();
+      if (ambariServerIdentityDescriptor != null) {
+
+        KerberosPrincipalDescriptor principalDescriptor = ambariServerIdentityDescriptor.getPrincipalDescriptor();
+        if (principalDescriptor != null) {
+          putConfiguration(map, principalDescriptor.getConfiguration(), principalDescriptor.getValue());
+        }
+
+        KerberosKeytabDescriptor keytabDescriptor = ambariServerIdentityDescriptor.getKeytabDescriptor();
+        if (keytabDescriptor != null) {
+          putConfiguration(map, keytabDescriptor.getConfiguration(), keytabDescriptor.getFile());
+        }
+
+        for (Map.Entry<String, Map<String, String>> entry : map.entrySet()) {
+          String configType = entry.getKey();
+          mergeConfigurations(kerberosConfigurations, configType, entry.getValue(), configurations);
+        }
+      }
+    }
+
     setAuthToLocalRules(kerberosDescriptor, cluster, kerberosDetails.getDefaultRealm(), configurations, kerberosConfigurations);
 
     return (applyStackAdvisorUpdates)
-        ? applyStackAdvisorUpdates(cluster, services, configurations, kerberosConfigurations, propertiesToIgnore, kerberosEnabled)
+        ? applyStackAdvisorUpdates(cluster, services, configurations, kerberosConfigurations, propertiesToIgnore,
+      null, null,
+      kerberosEnabled)
         : kerberosConfigurations;
   }
 
+  @Override
   public Map<String, Map<String, String>> applyStackAdvisorUpdates(Cluster cluster, Set<String> services,
-                                                                    Map<String, Map<String, String>> existingConfigurations,
-                                                                    Map<String, Map<String, String>> kerberosConfigurations,
-                                                                    Map<String, Set<String>> propertiesToIgnore,
+                                                                   Map<String, Map<String, String>> existingConfigurations,
+                                                                   Map<String, Map<String, String>> kerberosConfigurations,
+                                                                   Map<String, Set<String>> propertiesToIgnore,
+                                                                   Map<String, Map<String, String>> propertiesToInsert,
+                                                                   Map<String, Set<String>> propertiesToRemove,
                                                                    boolean kerberosEnabled) throws AmbariException {
 
     StackId stackVersion = cluster.getCurrentStackVersion();
@@ -430,7 +462,7 @@ public class KerberosHelperImpl implements KerberosHelper {
     // will throw a StackAdvisorException stating "Hosts and services must not be empty".
     // This could happen when enabling Kerberos while installing a cluster via Blueprints due to the
     // way hosts are discovered during the install process.
-    if(!hostNames.isEmpty()) {
+    if (!hostNames.isEmpty()) {
       Map<String, Map<String, Map<String, String>>> requestConfigurations = new HashMap<String, Map<String, Map<String, String>>>();
       if (existingConfigurations != null) {
         for (Map.Entry<String, Map<String, String>> configuration : existingConfigurations.entrySet()) {
@@ -478,12 +510,12 @@ public class KerberosHelperImpl implements KerberosHelper {
       }
 
       StackAdvisorRequest request = StackAdvisorRequest.StackAdvisorRequestBuilder
-          .forStack(stackVersion.getStackName(), stackVersion.getStackVersion())
-          .forServices(new ArrayList<String>(services))
-          .forHosts(hostNames)
-          .withConfigurations(requestConfigurations)
-          .ofType(StackAdvisorRequest.StackAdvisorRequestType.CONFIGURATIONS)
-          .build();
+        .forStack(stackVersion.getStackName(), stackVersion.getStackVersion())
+        .forServices(new ArrayList<String>(services))
+        .forHosts(hostNames)
+        .withConfigurations(requestConfigurations)
+        .ofType(StackAdvisorRequest.StackAdvisorRequestType.CONFIGURATIONS)
+        .build();
 
       try {
         RecommendationResponse response = stackAdvisorHelper.recommend(request);
@@ -496,60 +528,145 @@ public class KerberosHelperImpl implements KerberosHelper {
           for (Map.Entry<String, RecommendationResponse.BlueprintConfigurations> configuration : configurations.entrySet()) {
             String configType = configuration.getKey();
             Map<String, String> recommendedConfigProperties = configuration.getValue().getProperties();
+            Map<String, ValueAttributesInfo> recommendedConfigPropertyAttributes = configuration.getValue().getPropertyAttributes();
             Map<String, String> existingConfigProperties = (existingConfigurations == null) ? null : existingConfigurations.get(configType);
             Map<String, String> kerberosConfigProperties = kerberosConfigurations.get(configType);
             Set<String> ignoreProperties = (propertiesToIgnore == null) ? null : propertiesToIgnore.get(configType);
 
-            for (Map.Entry<String, String> property : recommendedConfigProperties.entrySet()) {
-              String propertyName = property.getKey();
-
-              if ((ignoreProperties == null) || !ignoreProperties.contains(propertyName)) {
-                String recommendedValue = property.getValue();
+            addRecommendedPropertiesForConfigType(kerberosEnabled, kerberosConfigurations, configType,
+              recommendedConfigProperties,
+              existingConfigProperties, kerberosConfigProperties, ignoreProperties, propertiesToInsert);
+            if (recommendedConfigPropertyAttributes != null) {
+              removeRecommendedPropertiesForConfigType(kerberosEnabled, configType,
+                recommendedConfigPropertyAttributes,
+                existingConfigProperties,
+                kerberosConfigurations, ignoreProperties, propertiesToRemove);
+            }
+          }
+        }
 
-                if (kerberosConfigProperties == null) {
-                  // There is no explicit update for this property from the Kerberos Descriptor...
-                  // add the config and property if it also does not exist in the existing configurations
-                  if ((existingConfigProperties == null) || !existingConfigProperties.containsKey(propertyName)) {
-                    LOG.debug("Adding Kerberos configuration based on StackAdvisor recommendation:" +
-                            "\n\tConfigType: {}\n\tProperty: {}\n\tValue: {}",
-                        configType, propertyName, recommendedValue);
+      } catch (Exception e) {
+        throw new AmbariException(e.getMessage(), e);
+      }
+    }
 
-                    HashMap<String, String> properties = new HashMap<String, String>();
-                    properties.put(propertyName, recommendedValue);
-                    kerberosConfigurations.put(configType, properties);
-                  }
-                } else {
-                  String value = kerberosConfigProperties.get(propertyName);
-                  if (value == null) {
-                    // There is no explicit update for this property from the Kerberos Descriptor...
-                    // add the property if it also does not exist in the existing configurations
-                    if ((existingConfigProperties == null) || !existingConfigProperties.containsKey(propertyName)) {
-                      LOG.debug("Adding Kerberos configuration based on StackAdvisor recommendation:" +
-                              "\n\tConfigType: {}\n\tProperty: {}\n\tValue: {}",
-                          configType, propertyName, recommendedValue);
-
-                      kerberosConfigProperties.put(propertyName, recommendedValue);
-                    }
-                  } else if (!value.equals(recommendedValue)) {
-                    // If the recommended value is a change, automatically change it.
-                    LOG.debug("Updating Kerberos configuration based on StackAdvisor recommendation:" +
-                            "\n\tConfigType: {}\n\tProperty: {}\n\tOld Value: {}\n\tNew Value: {}",
-                        configType, propertyName, value, recommendedValue);
+    return kerberosConfigurations;
+  }
 
-                    kerberosConfigProperties.put(propertyName, recommendedValue);
-                  }
-                }
+  /*
+   * A recommended property is added to kerberosConfigurations if kerberosEnabled is true, or to
+   * propertiesToInsert otherwise.
+   */
+  private void addRecommendedPropertiesForConfigType(boolean kerberosEnabled, Map<String, Map<String, String>> kerberosConfigurations,
+                                                     String configType, Map<String, String> recommendedConfigProperties,
+                                                     Map<String, String> existingConfigProperties,
+                                                     Map<String, String> kerberosConfigProperties,
+                                                     Set<String> ignoreProperties, Map<String, Map<String, String>>
+                                                       propertiesToInsert) {
+
+    for (Map.Entry<String, String> property : recommendedConfigProperties.entrySet()) {
+      String propertyName = property.getKey();
+
+      if ((ignoreProperties == null) || !ignoreProperties.contains(propertyName)) {
+        String recommendedValue = property.getValue();
+        if (kerberosEnabled) {
+          if (kerberosConfigProperties == null) {
+            // There is no explicit update for this property from the Kerberos Descriptor...
+            // add the config and property if it also does not exist in the existing configurations
+            if ((existingConfigProperties == null) || !existingConfigProperties.containsKey(propertyName)) {
+              LOG.debug("Adding Kerberos configuration based on StackAdvisor recommendation:" +
+                  "\n\tConfigType: {}\n\tProperty: {}\n\tValue: {}",
+                configType, propertyName, recommendedValue);
+
+              HashMap<String, String> properties = new HashMap<String, String>();
+              properties.put(propertyName, recommendedValue);
+              kerberosConfigurations.put(configType, properties);
+            }
+          } else {
+            String value = kerberosConfigProperties.get(propertyName);
+            if (value == null) {
+              // There is no explicit update for this property from the Kerberos Descriptor...
+              // add the property if it also does not exist in the existing configurations
+              if ((existingConfigProperties == null) || !existingConfigProperties.containsKey(propertyName)) {
+                LOG.debug("Adding Kerberos configuration based on StackAdvisor recommendation:" +
+                    "\n\tConfigType: {}\n\tProperty: {}\n\tValue: {}",
+                  configType, propertyName, recommendedValue);
+
+                kerberosConfigProperties.put(propertyName, recommendedValue);
               }
+            } else if (!value.equals(recommendedValue)) {
+              // If the recommended value is a change, automatically change it.
+              LOG.debug("Updating Kerberos configuration based on StackAdvisor recommendation:" +
+                  "\n\tConfigType: {}\n\tProperty: {}\n\tOld Value: {}\n\tNew Value: {}",
+                configType, propertyName, value, recommendedValue);
+
+              kerberosConfigProperties.put(propertyName, recommendedValue);
             }
           }
+        } else if (propertiesToInsert != null && ((existingConfigProperties == null) || !existingConfigProperties
+          .containsKey(propertyName))) {
+          Map<String, String> properties = propertiesToInsert.get(configType);
+          if (properties == null) {
+            properties = new HashMap<>();
+            propertiesToInsert.put(configType, properties);
+          }
+
+          LOG.debug("Property to add to configuration based on StackAdvisor recommendation:" +
+              "\n\tConfigType: {}\n\tProperty: {}\n\tValue: {}",
+            configType, propertyName, recommendedValue);
+
+          properties.put(propertyName, recommendedValue);
         }
 
-      } catch (Exception e) {
-        throw new AmbariException(e.getMessage(), e);
+
       }
     }
+  }
 
-    return kerberosConfigurations;
+  /**
+   * If a property is marked with the delete flag in the recommendedConfigPropertyAttributes map and is not found in
+   * ignoreProperties or kerberosConfigProperties but exists in existingConfigProperties, it is added to the
+   * propertiesToRemove map if kerberosEnabled is true, or to kerberosConfigurations otherwise.
+   */
+  private void removeRecommendedPropertiesForConfigType(boolean kerberosEnabled, String configType,
+                                                        Map<String, ValueAttributesInfo> recommendedConfigPropertyAttributes,
+                                                        Map<String, String> existingConfigProperties,
+                                                        Map<String, Map<String, String>> kerberosConfigurations,
+                                                        Set<String> ignoreProperties, Map<String, Set<String>>
+                                                          propertiesToRemove) {
+
+    for (Map.Entry<String, ValueAttributesInfo> property : recommendedConfigPropertyAttributes.entrySet()) {
+      String propertyName = property.getKey();
+      if ("true".equalsIgnoreCase(property.getValue().getDelete())) {
+        // if the property is not in ignoreProperties or kerberosConfigProperties but is found in existingConfigProperties,
+        // add it to the propertiesToRemove map (or blank it out via kerberosConfigurations when disabling Kerberos)
+        Map<String, String> kerberosConfigProperties = kerberosConfigurations.get(configType);
+        if (((ignoreProperties == null) || !ignoreProperties.contains(propertyName)) &&
+          ((kerberosConfigProperties == null) || kerberosConfigProperties.get(propertyName) == null) &&
+          (existingConfigProperties != null && existingConfigProperties.containsKey(propertyName))) {
+
+          LOG.debug("Property to remove from configuration based on StackAdvisor recommendation:" +
+              "\n\tConfigType: {}\n\tProperty: {}",
+            configType, propertyName);
+
+          // if kerberosEnabled, add the property to propertiesToRemove; otherwise add it to the kerberosConfigurations map
+          if (kerberosEnabled && propertiesToRemove != null) {
+            Set<String> properties = propertiesToRemove.get(configType);
+            if (properties == null) {
+              properties = new HashSet<String>();
+              propertiesToRemove.put(configType, properties);
+            }
+            properties.add(propertyName);
+          } else {
+            if (kerberosConfigProperties == null) {
+              kerberosConfigProperties = new HashMap<String, String>();
+              kerberosConfigurations.put(configType, kerberosConfigProperties);
+            }
+            kerberosConfigProperties.put(propertyName, "");
+          }
+        }
+      }
+    }
   }
 
   @Override
@@ -609,6 +726,20 @@ public class KerberosHelperImpl implements KerberosHelper {
         }
       }
 
+      // create Ambari principal & keytab, configure JAAS only if 'kerberos-env.create_ambari_principal = true'
+      if (kerberosDetails.createAmbariPrincipal()) {
+        KerberosIdentityDescriptor ambariServerIdentity = kerberosDescriptor.getIdentity(KerberosHelper.AMBARI_IDENTITY_NAME);
+        if (ambariServerIdentity != null) {
+          createUserIdentity(ambariServerIdentity, kerberosConfiguration, kerberosOperationHandler, configurations);
+          configureAmbariIdentity(ambariServerIdentity, kerberosOperationHandler, configurations);
+          try {
+            KerberosChecker.checkJaasConfiguration();
+          } catch(AmbariException e) {
+            LOG.error("Error in Ambari JAAS configuration: ", e);
+          }
+        }
+      }
+
       // The KerberosOperationHandler needs to be closed, if it fails to close ignore the
       // exception since there is little we can or care to do about it now.
       try {
@@ -622,11 +753,42 @@ public class KerberosHelperImpl implements KerberosHelper {
     return true;
   }
 
+  private boolean configureAmbariIdentity(KerberosIdentityDescriptor ambariServerIdentity,
+                                          KerberosOperationHandler kerberosOperationHandler, Map<String, Map<String, String>>
+                                            configurations) throws AmbariException {
+    boolean created = false;
+
+
+    KerberosPrincipalDescriptor principalDescriptor = ambariServerIdentity.getPrincipalDescriptor();
+    if (principalDescriptor != null) {
+      String principal = variableReplacementHelper.replaceVariables(principalDescriptor.getValue(), configurations);
+
+      // If this principal is already in the Ambari database, then don't try to recreate it or its
+      // keytab file.
+      if (kerberosPrincipalDAO.exists(principal)) {
+
+        KerberosKeytabDescriptor keytabDescriptor = ambariServerIdentity.getKeytabDescriptor();
+
+        if (keytabDescriptor != null) {
+          String keytabFilePath = variableReplacementHelper.replaceVariables(keytabDescriptor.getFile(), configurations);
+          injector.getInstance(ConfigureAmbariIndetityServerAction.class)
+            .createAndConfigureAmbariKeytab(principal, kerberosOperationHandler, keytabFilePath,
+              keytabDescriptor.getOwnerName(), keytabDescriptor.getOwnerAccess(), null);
+          // throw new AmbariException("Failed to create the keytab for " + principal);
+        }
+
+      }
+    }
+
+    return created;
+  }
+
   @Override
   public RequestStageContainer createTestIdentity(Cluster cluster, Map<String, String> commandParamsStage,
                                                   RequestStageContainer requestStageContainer)
       throws KerberosOperationException, AmbariException {
-    return handleTestIdentity(cluster, getKerberosDetails(cluster, null), commandParamsStage, requestStageContainer, new CreatePrincipalsAndKeytabsHandler(false, false));
+    return handleTestIdentity(cluster, getKerberosDetails(cluster, null), commandParamsStage, requestStageContainer,
+      new CreatePrincipalsAndKeytabsHandler(false, false, false));
   }
 
   @Override
@@ -2539,6 +2701,29 @@ public class KerberosHelperImpl implements KerberosHelper {
       requestStageContainer.addStages(roleGraph.getStages());
     }
 
+    public void addConfigureAmbariIdentityStage(Cluster cluster, String clusterHostInfoJson,
+                                          String hostParamsJson, ServiceComponentHostServerActionEvent event,
+                                          Map<String, String> commandParameters,
+                                          RoleCommandOrder roleCommandOrder, RequestStageContainer requestStageContainer)
+      throws AmbariException {
+      Stage stage = createServerActionStage(requestStageContainer.getLastStageId(),
+        cluster,
+        requestStageContainer.getId(),
+        "Configure Ambari Identity",
+        clusterHostInfoJson,
+        "{}",
+        hostParamsJson,
+        ConfigureAmbariIndetityServerAction.class,
+        event,
+        commandParameters,
+        "Configure Ambari Identity",
+        configuration.getDefaultServerTaskTimeout());
+
+      RoleGraph roleGraph = roleGraphFactory.createNew(roleCommandOrder);
+      roleGraph.build(stage);
+      requestStageContainer.addStages(roleGraph.getStages());
+    }
+
     public void addCreateKeytabFilesStage(Cluster cluster, String clusterHostInfoJson,
                                           String hostParamsJson, ServiceComponentHostServerActionEvent event,
                                           Map<String, String> commandParameters,
@@ -2840,12 +3025,19 @@ public class KerberosHelperImpl implements KerberosHelper {
         // *****************************************************************
         // Create stage to create principals
         addCreatePrincipalsStage(cluster, clusterHostInfoJson, hostParamsJson, event, commandParameters,
-            roleCommandOrder, requestStageContainer);
+          roleCommandOrder, requestStageContainer);
 
         // *****************************************************************
         // Create stage to generate keytabs
         addCreateKeytabFilesStage(cluster, clusterHostInfoJson, hostParamsJson, event, commandParameters,
+          roleCommandOrder, requestStageContainer);
+
+        // *****************************************************************
+        // Create stage to distribute and configure keytab for Ambari server and configure JAAS
+        if (kerberosDetails.createAmbariPrincipal()) {
+          addConfigureAmbariIdentityStage(cluster, clusterHostInfoJson, hostParamsJson, event, commandParameters,
             roleCommandOrder, requestStageContainer);
+        }
 
         // *****************************************************************
         // Create stage to distribute keytabs
@@ -2998,6 +3190,12 @@ public class KerberosHelperImpl implements KerberosHelper {
     private boolean updateConfigurations;
 
     /**
+     * A boolean value indicating whether to include Ambari server identity (<code>true</code>)
+     * or ignore it (<code>false</code>).
+     */
+    private boolean includeAmbariIdentity;
+
+    /**
      * CreatePrincipalsAndKeytabsHandler constructor to set whether this instance should be used to
      * regenerate all keytabs or just the ones that have not been distributed
      *
@@ -3008,9 +3206,11 @@ public class KerberosHelperImpl implements KerberosHelper {
      *                             (<code>true</code>) or ignore any potential configuration changes
      *                             (<code>false</code>)
      */
-    public CreatePrincipalsAndKeytabsHandler(boolean regenerateAllKeytabs, boolean updateConfigurations) {
+    public CreatePrincipalsAndKeytabsHandler(boolean regenerateAllKeytabs, boolean updateConfigurations, boolean
+      includeAmbariIdentity) {
       this.regenerateAllKeytabs = regenerateAllKeytabs;
       this.updateConfigurations = updateConfigurations;
+      this.includeAmbariIdentity = includeAmbariIdentity;
     }
 
     @Override
@@ -3077,6 +3277,7 @@ public class KerberosHelperImpl implements KerberosHelper {
       }
 
       commandParameters.put(KerberosServerAction.REGENERATE_ALL, (regenerateAllKeytabs) ? "true" : "false");
+      commandParameters.put(KerberosServerAction.INCLUDE_AMBARI_IDENTITY, (includeAmbariIdentity) ? "true" : "false");
 
       if (updateConfigurations) {
         commandParameters.put(KerberosServerAction.UPDATE_CONFIGURATION_NOTE, "Updated Kerberos-related configurations");
@@ -3102,6 +3303,13 @@ public class KerberosHelperImpl implements KerberosHelper {
             commandParameters, roleCommandOrder, requestStageContainer);
 
         // *****************************************************************
+        // Create stage to distribute and configure keytab for Ambari server and configure JAAS
+        if (includeAmbariIdentity && kerberosDetails.createAmbariPrincipal()) {
+          addConfigureAmbariIdentityStage(cluster, clusterHostInfoJson, hostParamsJson, event, commandParameters,
+            roleCommandOrder, requestStageContainer);
+        }
+
+        // *****************************************************************
         // Create stage to distribute keytabs
         addDistributeKeytabFilesStage(cluster, serviceComponentHosts, clusterHostInfoJson,
             hostParamsJson, commandParameters, roleCommandOrder, requestStageContainer, hostsWithValidKerberosClient);
@@ -3223,6 +3431,7 @@ public class KerberosHelperImpl implements KerberosHelper {
     private Map<String, String> kerberosEnvProperties;
     private SecurityType securityType;
     private Boolean manageIdentities;
+    private Boolean createAmbariPrincipal;
 
     public void setDefaultRealm(String defaultRealm) {
       this.defaultRealm = defaultRealm;
@@ -3259,12 +3468,21 @@ public class KerberosHelperImpl implements KerberosHelper {
     public boolean manageIdentities() {
       if (manageIdentities == null) {
         return (kerberosEnvProperties == null) ||
-            !"false".equalsIgnoreCase(kerberosEnvProperties.get("manage_identities"));
+            !"false".equalsIgnoreCase(kerberosEnvProperties.get(MANAGE_IDENTITIES));
       } else {
         return manageIdentities;
       }
     }
 
+    public boolean createAmbariPrincipal() {
+      if (createAmbariPrincipal == null) {
+        return (kerberosEnvProperties == null) ||
+          !"false".equalsIgnoreCase(kerberosEnvProperties.get(CREATE_AMBARI_PRINCIPAL));
+      } else {
+        return createAmbariPrincipal;
+      }
+    }
+
     public void setManageIdentities(Boolean manageIdentities) {
       this.manageIdentities = manageIdentities;
     }

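Taken together, addRecommendedPropertiesForConfigType and removeRecommendedPropertiesForConfigType route each stack advisor recommendation to one of three maps, depending on whether Kerberos is being enabled and whether the property attribute carries a delete flag. The standalone sketch below summarizes that routing; it is purely illustrative and omits the null guards and existing/ignored-property checks of the real methods.

/** Illustrative only: where a StackAdvisor recommendation ends up, per the two helpers above. */
public class RecommendationRoutingSketch {

  enum Target { KERBEROS_CONFIGURATIONS, PROPERTIES_TO_INSERT, PROPERTIES_TO_REMOVE }

  static Target route(boolean kerberosEnabled, boolean markedForDelete) {
    if (markedForDelete) {
      // removals: collected in propertiesToRemove while enabling Kerberos,
      // or blanked out through kerberosConfigurations while disabling it
      return kerberosEnabled ? Target.PROPERTIES_TO_REMOVE : Target.KERBEROS_CONFIGURATIONS;
    }
    // additions: merged into kerberosConfigurations while enabling Kerberos,
    // or collected in propertiesToInsert while disabling it
    return kerberosEnabled ? Target.KERBEROS_CONFIGURATIONS : Target.PROPERTIES_TO_INSERT;
  }

  public static void main(String[] args) {
    System.out.println(route(true, false));  // KERBEROS_CONFIGURATIONS
    System.out.println(route(false, true));  // KERBEROS_CONFIGURATIONS (value cleared)
  }
}
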
http://git-wip-us.apache.org/repos/asf/ambari/blob/6c008c83/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/AbstractPrepareKerberosServerAction.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/AbstractPrepareKerberosServerAction.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/AbstractPrepareKerberosServerAction.java
index 3a945ad..793ff6b 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/AbstractPrepareKerberosServerAction.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/AbstractPrepareKerberosServerAction.java
@@ -36,7 +36,9 @@ import org.slf4j.LoggerFactory;
 import java.io.File;
 import java.io.IOException;
 import java.lang.reflect.Type;
+import java.util.Arrays;
 import java.util.Collection;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
@@ -63,7 +65,10 @@ public abstract class AbstractPrepareKerberosServerAction extends KerberosServer
   protected void processServiceComponentHosts(Cluster cluster, KerberosDescriptor kerberosDescriptor, List<ServiceComponentHost> schToProcess,
                                               Collection<String> identityFilter, String dataDirectory,
                                               Map<String, Map<String, String>> kerberosConfigurations,
-                                              boolean kerberosEnabled) throws AmbariException {
+                                              Map<String, Map<String, String>> propertiesToInsert,
+                                              Map<String, Set<String>> propertiesToRemove,
+                                              boolean kerberosEnabled, boolean includeAmbariIdentity) throws
+    AmbariException {
 
     actionLog.writeStdOut("Processing Kerberos identities and configurations");
 
@@ -99,55 +104,71 @@ public abstract class AbstractPrepareKerberosServerAction extends KerberosServer
         Set<String> services = new HashSet<String>();
         Map<String, Set<String>> propertiesToIgnore = null;
 
-        // Iterate over the components installed on the current host to get the service and
-        // component-level Kerberos descriptors in order to determine which principals,
-        // keytab files, and configurations need to be created or updated.
-        for (ServiceComponentHost sch : schToProcess) {
-          String hostName = sch.getHostName();
+        try {
 
-          try {
-              String serviceName = sch.getServiceName();
-              String componentName = sch.getServiceComponentName();
+          // Iterate over the components installed on the current host to get the service and
+          // component-level Kerberos descriptors in order to determine which principals,
+          // keytab files, and configurations need to be created or updated.
+          for (ServiceComponentHost sch : schToProcess) {
+            String hostName = sch.getHostName();
 
-              KerberosServiceDescriptor serviceDescriptor = kerberosDescriptor.getService(serviceName);
+            String serviceName = sch.getServiceName();
+            String componentName = sch.getServiceComponentName();
 
-              if (serviceDescriptor != null) {
-                List<KerberosIdentityDescriptor> serviceIdentities = serviceDescriptor.getIdentities(true);
+            KerberosServiceDescriptor serviceDescriptor = kerberosDescriptor.getService(serviceName);
 
-                // Add service-level principals (and keytabs)
-                kerberosHelper.addIdentities(kerberosIdentityDataFileWriter, serviceIdentities,
-                    identityFilter, hostName, serviceName, componentName, kerberosConfigurations, configurations);
-                propertiesToIgnore = gatherPropertiesToIgnore(serviceIdentities, propertiesToIgnore);
+            if (serviceDescriptor != null) {
+              List<KerberosIdentityDescriptor> serviceIdentities = serviceDescriptor.getIdentities(true);
+
+              // Add service-level principals (and keytabs)
+              kerberosHelper.addIdentities(kerberosIdentityDataFileWriter, serviceIdentities,
+                  identityFilter, hostName, serviceName, componentName, kerberosConfigurations, configurations);
+              propertiesToIgnore = gatherPropertiesToIgnore(serviceIdentities, propertiesToIgnore);
 
-                KerberosComponentDescriptor componentDescriptor = serviceDescriptor.getComponent(componentName);
+              KerberosComponentDescriptor componentDescriptor = serviceDescriptor.getComponent(componentName);
 
-                if (componentDescriptor != null) {
-                  List<KerberosIdentityDescriptor> componentIdentities = componentDescriptor.getIdentities(true);
+              if (componentDescriptor != null) {
+                List<KerberosIdentityDescriptor> componentIdentities = componentDescriptor.getIdentities(true);
 
-                  // Calculate the set of configurations to update and replace any variables
-                  // using the previously calculated Map of configurations for the host.
-                  kerberosHelper.mergeConfigurations(kerberosConfigurations,
-                      componentDescriptor.getConfigurations(true), configurations);
+                // Calculate the set of configurations to update and replace any variables
+                // using the previously calculated Map of configurations for the host.
+                kerberosHelper.mergeConfigurations(kerberosConfigurations,
+                    componentDescriptor.getConfigurations(true), configurations);
 
-                  // Add component-level principals (and keytabs)
-                  kerberosHelper.addIdentities(kerberosIdentityDataFileWriter, componentIdentities,
-                      identityFilter, hostName, serviceName, componentName, kerberosConfigurations, configurations);
-                  propertiesToIgnore = gatherPropertiesToIgnore(componentIdentities, propertiesToIgnore);
-                }
+                // Add component-level principals (and keytabs)
+                kerberosHelper.addIdentities(kerberosIdentityDataFileWriter, componentIdentities,
+                    identityFilter, hostName, serviceName, componentName, kerberosConfigurations, configurations);
+                propertiesToIgnore = gatherPropertiesToIgnore(componentIdentities, propertiesToIgnore);
               }
+            }
 
-              services.add(serviceName);
-          } catch (IOException e) {
-            String message = String.format("Failed to write index file - %s", identityDataFile.getAbsolutePath());
-            LOG.error(message, e);
-            actionLog.writeStdOut(message);
-            actionLog.writeStdErr(message + "\n" + e.getLocalizedMessage());
-            throw new AmbariException(message, e);
+            services.add(serviceName);
           }
+
+          // Add ambari-server principal (and keytab) only if 'kerberos-env.create_ambari_principal = true'
+          Map<String, String> kerberosEnvProperties = configurations.get("kerberos-env");
+          if (kerberosEnvProperties != null && kerberosEnvProperties.get(KerberosHelper.CREATE_AMBARI_PRINCIPAL) != null
+            && "true".equalsIgnoreCase(kerberosEnvProperties.get(KerberosHelper.CREATE_AMBARI_PRINCIPAL))
+              && includeAmbariIdentity) {
+            KerberosIdentityDescriptor ambariServerIdentity = kerberosDescriptor.getIdentity(KerberosHelper.AMBARI_IDENTITY_NAME);
+            if (ambariServerIdentity != null) {
+              List<KerberosIdentityDescriptor> componentIdentities = Collections.singletonList(ambariServerIdentity);
+              kerberosHelper.addIdentities(kerberosIdentityDataFileWriter, componentIdentities,
+                identityFilter, KerberosHelper.AMBARI_SERVER_HOST_NAME, "AMBARI_SERVER", "AMBARI_SERVER", kerberosConfigurations, configurations);
+              propertiesToIgnore = gatherPropertiesToIgnore(componentIdentities, propertiesToIgnore);
+            }
+          }
+
+        } catch (IOException e) {
+          String message = String.format("Failed to write index file - %s", identityDataFile.getAbsolutePath());
+          LOG.error(message, e);
+          actionLog.writeStdOut(message);
+          actionLog.writeStdErr(message + "\n" + e.getLocalizedMessage());
+          throw new AmbariException(message, e);
         }
 
         kerberosHelper.applyStackAdvisorUpdates(cluster, services, configurations, kerberosConfigurations,
-            propertiesToIgnore, kerberosEnabled);
+            propertiesToIgnore, propertiesToInsert, propertiesToRemove, kerberosEnabled);
       }
       finally {
         if (kerberosIdentityDataFileWriter != null) {

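Note the asymmetry between the two checks for create_ambari_principal: KerberosDetails.createAmbariPrincipal() in KerberosHelperImpl defaults to true when the kerberos-env property is absent, while processServiceComponentHosts above only adds the ambari-server identity when the property is explicitly "true" and includeAmbariIdentity is set. A small sketch of the guard used here, with a hypothetical helper name:

import java.util.Map;

import org.apache.ambari.server.controller.KerberosHelper;

/** Illustrative only: the condition under which the ambari-server identity is written above. */
public class AmbariIdentityGuardSketch {

  static boolean shouldAddAmbariServerIdentity(Map<String, String> kerberosEnvProperties,
                                               boolean includeAmbariIdentity) {
    // kerberos-env.create_ambari_principal must be explicitly "true" here,
    // and the handler must have requested the Ambari identity.
    return includeAmbariIdentity
        && kerberosEnvProperties != null
        && "true".equalsIgnoreCase(kerberosEnvProperties.get(KerberosHelper.CREATE_AMBARI_PRINCIPAL));
  }
}
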
http://git-wip-us.apache.org/repos/asf/ambari/blob/6c008c83/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/ConfigureAmbariIndetityServerAction.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/ConfigureAmbariIndetityServerAction.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/ConfigureAmbariIndetityServerAction.java
new file mode 100644
index 0000000..03f9c69
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/ConfigureAmbariIndetityServerAction.java
@@ -0,0 +1,261 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.serveraction.kerberos;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.Map;
+import java.util.concurrent.ConcurrentMap;
+
+import org.apache.ambari.server.AmbariException;
+import org.apache.ambari.server.actionmanager.HostRoleStatus;
+import org.apache.ambari.server.agent.CommandReport;
+import org.apache.ambari.server.controller.KerberosHelper;
+import org.apache.ambari.server.controller.utilities.KerberosChecker;
+import org.apache.ambari.server.orm.dao.KerberosPrincipalDAO;
+import org.apache.ambari.server.orm.entities.KerberosPrincipalEntity;
+import org.apache.ambari.server.serveraction.ActionLog;
+import org.apache.commons.io.FileUtils;
+import org.apache.directory.server.kerberos.shared.keytab.Keytab;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.inject.Inject;
+
+/**
+ * ConfigureAmbariIndetityServerAction is a ServerAction implementation that creates the Ambari Server
+ * keytab and configures the related server-side identity settings as instructed.
+ * <p/>
+ * This class mainly relies on the KerberosServerAction to iterate through metadata identifying
+ * the Kerberos keytab files that need to be created. For each identity in the metadata, this
+ * implementation's
+ * {@link KerberosServerAction#processIdentity(Map, String, KerberosOperationHandler, Map, Map)}
+ * is invoked attempting the creation of the relevant keytab file.
+ */
+public class ConfigureAmbariIndetityServerAction extends KerberosServerAction {
+
+
+  private static final String KEYTAB_PATTERN = "keyTab=\"(.+)?\"";
+  private static final String PRINCIPAL_PATTERN = "principal=\"(.+)?\"";
+
+  private final static Logger LOG = LoggerFactory.getLogger(ConfigureAmbariIndetityServerAction.class);
+
+  /**
+   * KerberosPrincipalDAO used to set and get Kerberos principal details
+   */
+  @Inject
+  private KerberosPrincipalDAO kerberosPrincipalDAO;
+
+
+  /**
+   * Called to execute this action.  Upon invocation, calls
+   * {@link KerberosServerAction#processIdentities(Map)}
+   * to iterate through the Kerberos identity metadata and call
+   * {@link ConfigureAmbariIndetityServerAction#processIdentity(Map, String, KerberosOperationHandler, Map, Map)}
+   * for each identity to process.
+   *
+   * @param requestSharedDataContext a Map to be used as shared data among all ServerActions related
+   *                                 to a given request
+   * @return a CommandReport indicating the result of this action
+   * @throws AmbariException
+   * @throws InterruptedException
+   */
+  @Override
+  public CommandReport execute(ConcurrentMap<String, Object> requestSharedDataContext) throws
+    AmbariException, InterruptedException {
+    return processIdentities(requestSharedDataContext);
+  }
+
+
+  /**
+   * Creates the keytab file for the Ambari server identity and updates the server's JAAS configuration.
+   * <p/>
+   * It is expected that the {@link CreatePrincipalsServerAction}
+   * (or similar) and {@link CreateKeytabFilesServerAction} have executed before this action.
+   *
+   * @param identityRecord           a Map containing the data for the current identity record
+   * @param evaluatedPrincipal       a String indicating the relevant principal
+   * @param operationHandler         a KerberosOperationHandler used to perform Kerberos-related
+   *                                 tasks for specific Kerberos implementations
+   *                                 (MIT, Active Directory, etc...)
+   * @param kerberosConfiguration    a Map of configuration properties from kerberos-env
+   * @param requestSharedDataContext a Map to be used as shared data among all ServerActions related
+   *                                 to a given request
+   * @return a CommandReport indicating an error condition, or null indicating success
+   * @throws AmbariException if an error occurs while processing the identity record
+   */
+  @Override
+  protected CommandReport processIdentity(Map<String, String> identityRecord, String evaluatedPrincipal,
+                                          KerberosOperationHandler operationHandler,
+                                          Map<String, String> kerberosConfiguration,
+                                          Map<String, Object> requestSharedDataContext)
+    throws AmbariException {
+    CommandReport commandReport = null;
+
+    if (identityRecord != null) {
+      String message;
+      String dataDirectory = getDataDirectoryPath();
+
+      if (operationHandler == null) {
+        message = String.format("Failed to create keytab file for %s, missing KerberosOperationHandler", evaluatedPrincipal);
+        actionLog.writeStdErr(message);
+        LOG.error(message);
+        commandReport = createCommandReport(1, HostRoleStatus.FAILED, "{}", actionLog.getStdOut(), actionLog.getStdErr());
+      } else if (dataDirectory == null) {
+        message = "The data directory has not been set. Generated keytab files can not be stored.";
+        LOG.error(message);
+        commandReport = createCommandReport(1, HostRoleStatus.FAILED, "{}", actionLog.getStdOut(), actionLog.getStdErr());
+      } else {
+
+        String hostName = identityRecord.get(KerberosIdentityDataFileReader.HOSTNAME);
+        if (hostName != null && hostName.equalsIgnoreCase(KerberosHelper.AMBARI_SERVER_HOST_NAME)) {
+          String keytabFilePath = identityRecord.get(KerberosIdentityDataFileReader.KEYTAB_FILE_PATH);
+          String keytabOwner = identityRecord.get(KerberosIdentityDataFileReader.KEYTAB_FILE_OWNER_NAME);
+          String keytabAccess = identityRecord.get(KerberosIdentityDataFileReader.KEYTAB_FILE_OWNER_ACCESS);
+          createAndConfigureAmbariKeytab(evaluatedPrincipal, operationHandler, keytabFilePath, keytabOwner, keytabAccess, actionLog);
+        }
+      }
+    }
+
+    return commandReport;
+  }
+
+  public boolean createAndConfigureAmbariKeytab(String principal, KerberosOperationHandler operationHandler,
+                                    String keytabFilePath, String keytabOwner, String keytabAccess, ActionLog
+                                      actionLog) throws AmbariException {
+
+    KerberosPrincipalEntity principalEntity = kerberosPrincipalDAO.find(principal);
+    String cachedKeytabPath = (principalEntity == null) ? null : principalEntity.getCachedKeytabPath();
+
+    if (cachedKeytabPath == null) {
+      return false;
+    }
+
+    Keytab keytab = null;
+    try {
+      keytab = Keytab.read(new File(cachedKeytabPath));
+    } catch (IOException e) {
+      String message = String.format("Failed to read the cached keytab for %s, recreating if possible - %b",
+        principal, e.getMessage());
+      if (LOG.isDebugEnabled()) {
+        LOG.warn(message, e);
+      } else {
+        LOG.warn(message, e);
+      }
+    }
+
+    if (keytab == null) {
+      return false;
+    }
+
+    File keytabFile = new File(keytabFilePath);
+    ensureKeytabFolderExists(keytabFilePath);
+    try {
+      boolean created = operationHandler.createKeytabFile(keytab, keytabFile);
+      String message = String.format("Keytab successfully created: %s for principal %s", created, principal);
+      if (actionLog != null) {
+        actionLog.writeStdOut(message);
+      }
+      if (created) {
+        ensureAmbariOnlyAccess(keytabFile);
+        configureJAAS(principal, keytabFilePath, actionLog);
+      }
+      return created;
+    } catch (KerberosOperationException e) {
+      String message = String.format("Failed to create keytab file for %s - %s", principal, e.getMessage());
+      if (actionLog != null) {
+        actionLog.writeStdErr(message);
+      }
+      LOG.error(message, e);
+    }
+
+    return false;
+  }
+
+  private void ensureKeytabFolderExists(String keytabFilePath) {
+    String keytabFolderPath = keytabFilePath.substring(0, keytabFilePath.lastIndexOf("/"));
+    File keytabFolder = new File(keytabFolderPath);
+    if (!keytabFolder.exists() || !keytabFolder.isDirectory()) {
+      keytabFolder.mkdirs(); // create any missing parent directories as well
+    }
+  }
+
+  private void configureJAAS(String evaluatedPrincipal, String keytabFilePath, ActionLog actionLog) {
+    String jaasConfPath = System.getProperty(KerberosChecker.JAVA_SECURITY_AUTH_LOGIN_CONFIG);
+    File jaasConfigFile = new File(jaasConfPath);
+    try {
+      String jaasConfig = FileUtils.readFileToString(jaasConfigFile);
+      File oldJaasConfigFile = new File(jaasConfPath + ".bak");
+      FileUtils.writeStringToFile(oldJaasConfigFile, jaasConfig);
+      jaasConfig = jaasConfig.replaceFirst(KEYTAB_PATTERN, "keyTab=\"" + keytabFilePath + "\"");
+      jaasConfig = jaasConfig.replaceFirst(PRINCIPAL_PATTERN, "principal=\"" + evaluatedPrincipal + "\"");
+      FileUtils.writeStringToFile(jaasConfigFile, jaasConfig);
+      String message = String.format("JAAS config file %s modified successfully for principal %s.", jaasConfigFile
+        .getName(), evaluatedPrincipal);
+      if (actionLog != null) {
+        actionLog.writeStdOut(message);
+      }
+    } catch (IOException e) {
+      String message = String.format("Failed to configure JAAS file %s for %s - %s", jaasConfigFile,
+        evaluatedPrincipal, e.getMessage());
+      if (actionLog != null) {
+        actionLog.writeStdErr(message);
+      }
+      LOG.error(message, e);
+    }
+
+  }
+
+  /**
+   * Ensures that the owner of the Ambari server process is the only local user account able to
+   * read and write to the specified file or read, write to, and execute the specified directory.
+   *
+   * @param file the file or directory for which to modify access
+   */
+  protected void ensureAmbariOnlyAccess(File file) throws AmbariException {
+    if (file.exists()) {
+      if (!file.setReadable(false, false) || !file.setReadable(true, true)) {
+        String message = String.format("Failed to set %s readable only by Ambari", file.getAbsolutePath());
+        LOG.warn(message);
+        throw new AmbariException(message);
+      }
+
+      if (!file.setWritable(false, false) || !file.setWritable(true, true)) {
+        String message = String.format("Failed to set %s writable only by Ambari", file.getAbsolutePath());
+        LOG.warn(message);
+        throw new AmbariException(message);
+      }
+
+      if (file.isDirectory()) {
+        if (!file.setExecutable(false, false) || !file.setExecutable(true, true)) {
+          String message = String.format("Failed to set %s executable by Ambari", file.getAbsolutePath());
+          LOG.warn(message);
+          throw new AmbariException(message);
+        }
+      } else {
+        if (!file.setExecutable(false, false)) {
+          String message = String.format("Failed to set %s not executable", file.getAbsolutePath());
+          LOG.warn(message);
+          throw new AmbariException(message);
+        }
+      }
+    }
+  }
+
+}
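
For reference, configureJAAS() rewrites the keyTab and principal attributes of the Ambari server's JAAS login entry in place, after backing up the original file with a ".bak" suffix. A minimal sketch of the same substitution, shown in Python purely for illustration; the JAAS entry, paths, and principal below are hypothetical examples, not values taken from this patch:

import re

# Patterns mirroring KEYTAB_PATTERN / PRINCIPAL_PATTERN above.
KEYTAB_PATTERN = r'keyTab="(.+)?"'
PRINCIPAL_PATTERN = r'principal="(.+)?"'

# Hypothetical JAAS entry for illustration only; the real file is the one
# referenced by the java.security.auth.login.config system property.
jaas_config = '''ExampleClient {
    com.sun.security.auth.module.Krb5LoginModule required
    useKeyTab=true
    keyTab="/etc/security/keytabs/old.keytab"
    principal="old@EXAMPLE.COM";
};'''

# Equivalent of the two replaceFirst() calls in configureJAAS().
jaas_config = re.sub(KEYTAB_PATTERN,
                     'keyTab="/etc/security/keytabs/ambari.server.keytab"',
                     jaas_config, count=1)
jaas_config = re.sub(PRINCIPAL_PATTERN,
                     'principal="ambari-server-c1@EXAMPLE.COM"',
                     jaas_config, count=1)
print(jaas_config)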

http://git-wip-us.apache.org/repos/asf/ambari/blob/6c008c83/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/CreateKeytabFilesServerAction.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/CreateKeytabFilesServerAction.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/CreateKeytabFilesServerAction.java
index 8aa816d..81dae0e 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/CreateKeytabFilesServerAction.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/CreateKeytabFilesServerAction.java
@@ -23,6 +23,7 @@ import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.actionmanager.HostRoleStatus;
 import org.apache.ambari.server.agent.CommandReport;
 import org.apache.ambari.server.configuration.Configuration;
+import org.apache.ambari.server.controller.KerberosHelper;
 import org.apache.ambari.server.orm.dao.HostDAO;
 import org.apache.ambari.server.orm.dao.KerberosPrincipalDAO;
 import org.apache.ambari.server.orm.dao.KerberosPrincipalHostDAO;
@@ -196,7 +197,8 @@ public class CreateKeytabFilesServerAction extends KerberosServerAction {
             if (hostDirectory.exists()) {
               File destinationKeytabFile = new File(hostDirectory, DigestUtils.sha1Hex(keytabFilePath));
               HostEntity hostEntity = hostDAO.findByName(hostName);
-              if (hostEntity == null) {
+              // in case of ambari-server identity there's no host entity for ambari_server host
+              if (hostEntity == null && !hostName.equalsIgnoreCase(KerberosHelper.AMBARI_SERVER_HOST_NAME)) {
                 message = "Failed to find HostEntity for hostname = " + hostName;
                 actionLog.writeStdErr(message);
                 LOG.error(message);
@@ -205,7 +207,8 @@ public class CreateKeytabFilesServerAction extends KerberosServerAction {
               }
 
               if (password == null) {
-                if (kerberosPrincipalHostDAO.exists(evaluatedPrincipal, hostEntity.getHostId())) {
+                if (hostName.equalsIgnoreCase(KerberosHelper.AMBARI_SERVER_HOST_NAME) || kerberosPrincipalHostDAO
+                  .exists(evaluatedPrincipal, hostEntity.getHostId())) {
                   // There is nothing to do for this since it must already exist and we don't want to
                   // regenerate the keytab
                   message = String.format("Skipping keytab file for %s, missing password indicates nothing to do", evaluatedPrincipal);

http://git-wip-us.apache.org/repos/asf/ambari/blob/6c008c83/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/DestroyPrincipalsServerAction.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/DestroyPrincipalsServerAction.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/DestroyPrincipalsServerAction.java
index 93daae8..c2d8f6a 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/DestroyPrincipalsServerAction.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/DestroyPrincipalsServerAction.java
@@ -21,6 +21,7 @@ package org.apache.ambari.server.serveraction.kerberos;
 import com.google.inject.Inject;
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.agent.CommandReport;
+import org.apache.ambari.server.controller.KerberosHelper;
 import org.apache.ambari.server.orm.dao.KerberosPrincipalDAO;
 import org.apache.ambari.server.orm.entities.KerberosPrincipalEntity;
 import org.slf4j.Logger;
@@ -113,6 +114,17 @@ public class DestroyPrincipalsServerAction extends KerberosServerAction {
           }
         }
       }
+
+      // delete Ambari server keytab
+      String hostName = identityRecord.get(KerberosIdentityDataFileReader.HOSTNAME);
+      if (hostName != null && hostName.equalsIgnoreCase(KerberosHelper.AMBARI_SERVER_HOST_NAME)) {
+        String keytabFilePath = identityRecord.get(KerberosIdentityDataFileReader.KEYTAB_FILE_PATH);
+        if (keytabFilePath != null) {
+          if (!new File(keytabFilePath).delete()) {
+            LOG.debug(String.format("Failed to remove ambari keytab for %s", evaluatedPrincipal));
+          }
+        }
+      }
     }
     catch (Throwable t) {
       message = String.format("Failed to remove identity for %s from the Ambari database - %s", evaluatedPrincipal, t.getMessage());

http://git-wip-us.apache.org/repos/asf/ambari/blob/6c008c83/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/KerberosServerAction.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/KerberosServerAction.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/KerberosServerAction.java
index 90d9414..db210e0 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/KerberosServerAction.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/KerberosServerAction.java
@@ -124,6 +124,12 @@ public abstract class KerberosServerAction extends AbstractServerAction {
   */
   public static final String REGENERATE_ALL = "regenerate_all";
 
+  /**
+   * Key used in kerberosCommandParams in ExecutionCommand to indicate whether to include the Ambari server
+   * identity ("true") or ignore it ("false")
+   */
+  public static final String INCLUDE_AMBARI_IDENTITY = "include_ambari_identity";
+
   private static final Logger LOG = LoggerFactory.getLogger(KerberosServerAction.class);
 
   /**

http://git-wip-us.apache.org/repos/asf/ambari/blob/6c008c83/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/PrepareDisableKerberosServerAction.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/PrepareDisableKerberosServerAction.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/PrepareDisableKerberosServerAction.java
index 5d22385..5c56588 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/PrepareDisableKerberosServerAction.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/PrepareDisableKerberosServerAction.java
@@ -113,7 +113,9 @@ public class PrepareDisableKerberosServerAction extends AbstractPrepareKerberosS
       actionLog.writeStdOut(String.format("Processing %d components", schCount));
     }
 
-    processServiceComponentHosts(cluster, kerberosDescriptor, schToProcess, identityFilter, dataDirectory, kerberosConfigurations, false);
+    Map<String, Map<String, String>> propertiesToInsert = new HashMap<>();
+    processServiceComponentHosts(cluster, kerberosDescriptor, schToProcess, identityFilter, dataDirectory,
+      kerberosConfigurations, propertiesToInsert, null, false, true);
 
     // Add auth-to-local configurations to the set of changes
     Set<String> authToLocalProperties = kerberosDescriptor.getAllAuthToLocalProperties();
@@ -171,6 +173,21 @@ public class PrepareDisableKerberosServerAction extends AbstractPrepareKerberosS
       // or properties and the logic below will remove all from this set - which is not desirable.
       configurationsToRemove.remove("cluster-env");
 
+      // Update kerberosConfigurations with properties recommended by stack advisor
+      for (Map.Entry<String, Map<String, String>> typeEntry : propertiesToInsert.entrySet()) {
+        String configType = typeEntry.getKey();
+        Map<String, String> propertiesMap = typeEntry.getValue();
+
+        Map<String, String> kerberosPropertiesMap = kerberosConfigurations.get(configType);
+        if (kerberosPropertiesMap == null) {
+          kerberosConfigurations.put(configType, propertiesMap);
+        } else {
+          for (Map.Entry<String, String> propertyEntry : propertiesMap.entrySet()) {
+            kerberosPropertiesMap.put(propertyEntry.getKey(), propertyEntry.getValue());
+          }
+        }
+      }
+
       if (!schToProcess.isEmpty()) {
         Set<String> visitedServices = new HashSet<String>();
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/6c008c83/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/PrepareEnableKerberosServerAction.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/PrepareEnableKerberosServerAction.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/PrepareEnableKerberosServerAction.java
index a7c3861..70b7135 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/PrepareEnableKerberosServerAction.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/PrepareEnableKerberosServerAction.java
@@ -30,6 +30,7 @@ import java.util.Collection;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.Set;
 import java.util.concurrent.ConcurrentMap;
 
 /**
@@ -78,7 +79,9 @@ public class PrepareEnableKerberosServerAction extends PrepareKerberosIdentities
       actionLog.writeStdOut(String.format("Processing %d components", schCount));
     }
 
-    processServiceComponentHosts(cluster, kerberosDescriptor, schToProcess, identityFilter, dataDirectory, kerberosConfigurations, true);
+    Map<String, Set<String>> propertiesToBeRemoved = new HashMap<>();
+    processServiceComponentHosts(cluster, kerberosDescriptor, schToProcess, identityFilter, dataDirectory,
+      kerberosConfigurations, null, propertiesToBeRemoved, true, true);
     processAuthToLocalRules(cluster, kerberosDescriptor, schToProcess, kerberosConfigurations, getDefaultRealm(commandParameters));
 
     // Ensure the cluster-env/security_enabled flag is set properly
@@ -89,7 +92,7 @@ public class PrepareEnableKerberosServerAction extends PrepareKerberosIdentities
     }
     clusterEnvProperties.put(KerberosHelper.SECURITY_ENABLED_PROPERTY_NAME, "true");
 
-    processConfigurationChanges(dataDirectory, kerberosConfigurations);
+    processConfigurationChanges(dataDirectory, kerberosConfigurations, propertiesToBeRemoved);
 
     return createCommandReport(0, HostRoleStatus.COMPLETED, "{}", actionLog.getStdOut(), actionLog.getStdErr());
   }

http://git-wip-us.apache.org/repos/asf/ambari/blob/6c008c83/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/PrepareKerberosIdentitiesServerAction.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/PrepareKerberosIdentitiesServerAction.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/PrepareKerberosIdentitiesServerAction.java
index 8f8f67d..f70c546 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/PrepareKerberosIdentitiesServerAction.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/PrepareKerberosIdentitiesServerAction.java
@@ -35,6 +35,7 @@ import java.util.Collection;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.Set;
 import java.util.concurrent.ConcurrentMap;
 
 /**
@@ -93,11 +94,13 @@ public class PrepareKerberosIdentitiesServerAction extends AbstractPrepareKerber
       actionLog.writeStdOut(String.format("Processing %d components", schCount));
     }
 
-    processServiceComponentHosts(cluster, kerberosDescriptor, schToProcess, identityFilter, dataDirectory, kerberosConfigurations, true);
+    processServiceComponentHosts(cluster, kerberosDescriptor, schToProcess, identityFilter, dataDirectory,
+      kerberosConfigurations, null, null, true, "true".equalsIgnoreCase(getCommandParameterValue(commandParameters,
+        KerberosServerAction.INCLUDE_AMBARI_IDENTITY)));
 
     if ("true".equalsIgnoreCase(getCommandParameterValue(commandParameters, UPDATE_CONFIGURATIONS))) {
       processAuthToLocalRules(cluster, kerberosDescriptor, schToProcess, kerberosConfigurations, getDefaultRealm(commandParameters));
-      processConfigurationChanges(dataDirectory, kerberosConfigurations);
+      processConfigurationChanges(dataDirectory, kerberosConfigurations, null);
     }
 
     return createCommandReport(0, HostRoleStatus.COMPLETED, "{}", actionLog.getStdOut(), actionLog.getStdErr());
@@ -187,10 +190,12 @@ public class PrepareKerberosIdentitiesServerAction extends AbstractPrepareKerber
    *
    * @param dataDirectory          the directory in which to write the configuration changes data file
    * @param kerberosConfigurations the Kerberos-specific configuration map
+   * @param propertiesToBeRemoved  a Map of configuration types to sets of property names to be removed (may be null)
    * @throws AmbariException
    */
   protected void processConfigurationChanges(String dataDirectory,
-                                             Map<String, Map<String, String>> kerberosConfigurations)
+                                             Map<String, Map<String, String>> kerberosConfigurations,
+                                             Map<String, Set<String>> propertiesToBeRemoved)
       throws AmbariException {
     actionLog.writeStdOut("Determining configuration changes");
 
@@ -209,7 +214,7 @@ public class PrepareKerberosIdentitiesServerAction extends AbstractPrepareKerber
       actionLog.writeStdOut(String.format("Writing configuration changes metadata file to %s", configFile.getAbsolutePath()));
       try {
         kerberosConfDataFileWriter = kerberosConfigDataFileWriterFactory.createKerberosConfigDataFileWriter(configFile);
-
+        // add properties to be set
         for (Map.Entry<String, Map<String, String>> entry : kerberosConfigurations.entrySet()) {
           String type = entry.getKey();
           Map<String, String> properties = entry.getValue();
@@ -223,6 +228,22 @@ public class PrepareKerberosIdentitiesServerAction extends AbstractPrepareKerber
             }
           }
         }
+        // add properties to be removed
+        if (propertiesToBeRemoved != null) {
+          for (Map.Entry<String, Set<String>> entry : propertiesToBeRemoved.entrySet()) {
+            String type = entry.getKey();
+            Set<String> properties = entry.getValue();
+
+            if (properties != null) {
+              for (String property : properties) {
+                kerberosConfDataFileWriter.addRecord(type,
+                  property,
+                  "",
+                  KerberosConfigDataFileWriter.OPERATION_TYPE_REMOVE);
+              }
+            }
+          }
+        }
       } catch (IOException e) {
         String message = String.format("Failed to write kerberos configurations file - %s", configFile.getAbsolutePath());
         LOG.error(message, e);

http://git-wip-us.apache.org/repos/asf/ambari/blob/6c008c83/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/configuration/kerberos-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/configuration/kerberos-env.xml b/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/configuration/kerberos-env.xml
index b56bcbf..bb880e2 100644
--- a/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/configuration/kerberos-env.xml
+++ b/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/configuration/kerberos-env.xml
@@ -304,6 +304,20 @@
   </property>
 
   <property>
+    <name>create_ambari_principal</name>
+    <description>
+      Indicates whether Ambari should create a principal and keytab for itself, to be used by the different views.
+    </description>
+    <value>true</value>
+    <display-name>Create Ambari Principal &amp; Keytab</display-name>
+    <value-attributes>
+      <visible>true</visible>
+      <overridable>false</overridable>
+      <type>boolean</type>
+    </value-attributes>
+  </property>
+
+  <property>
     <name>kdc_create_attributes</name>
     <display-name>Principal Attributes</display-name>
     <description>

http://git-wip-us.apache.org/repos/asf/ambari/blob/6c008c83/ambari-server/src/main/resources/stacks/HDP/2.0.6/kerberos.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/kerberos.json b/ambari-server/src/main/resources/stacks/HDP/2.0.6/kerberos.json
index 7b1888b..d839df6 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/kerberos.json
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/kerberos.json
@@ -43,6 +43,22 @@
         },
         "configuration": "cluster-env/smokeuser_keytab"
       }
+    },
+    {
+      "name": "ambari-server",
+      "principal": {
+        "value": "ambari-server-${cluster_name}@${realm}",
+        "type" : "user",
+        "configuration": "cluster-env/ambari_principal_name"
+      },
+      "keytab": {
+        "file": "${keytab_dir}/ambari.server.keytab",
+        "owner": {
+          "name": "root",
+          "access": "r"
+        }
+      }
     }
   ]
+
 }
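
For orientation, the ${cluster_name}, ${realm}, and ${keytab_dir} variables in the new ambari-server identity are resolved per cluster when the descriptor is processed. A sketch of the resolved values for a hypothetical cluster named "c1" in realm EXAMPLE.COM, assuming keytab_dir resolves to /etc/security/keytabs (all values illustrative):

# Hypothetical resolved ambari-server identity (illustrative values only).
resolved_ambari_identity = {
  "principal": "ambari-server-c1@EXAMPLE.COM",             # written to cluster-env/ambari_principal_name
  "keytab": "/etc/security/keytabs/ambari.server.keytab"   # owned by root with read-only owner access
}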

http://git-wip-us.apache.org/repos/asf/ambari/blob/6c008c83/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py
index 4b99081..94a6e68 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py
@@ -132,6 +132,7 @@ class HDP206StackAdvisor(DefaultStackAdvisor):
 
   def recommendYARNConfigurations(self, configurations, clusterData, services, hosts):
     putYarnProperty = self.putProperty(configurations, "yarn-site", services)
+    putYarnPropertyAttribute = self.putPropertyAttribute(configurations, "yarn-site")
     putYarnEnvProperty = self.putProperty(configurations, "yarn-env", services)
     nodemanagerMinRam = 1048576 # 1TB in mb
     if "referenceNodeManagerHost" in clusterData:
@@ -145,6 +146,18 @@ class HDP206StackAdvisor(DefaultStackAdvisor):
       containerExecutorGroup = services['configurations']['cluster-env']['properties']['user_group']
     putYarnProperty("yarn.nodemanager.linux-container-executor.group", containerExecutorGroup)
 
+    servicesList = [service["StackServices"]["service_name"] for service in services["services"]]
+    if "TEZ" in servicesList:
+        ambari_user = self.getAmbariUser(services)
+        putYarnProperty("yarn.timeline-service.http-authentication.proxyuser.{0}.hosts".format(ambari_user), "*")
+        putYarnProperty("yarn.timeline-service.http-authentication.proxyuser.{0}.groups".format(ambari_user), "*")
+        putYarnProperty("yarn.timeline-service.http-authentication.proxyuser.{0}.users".format(ambari_user), "*")
+        old_ambari_user = self.getOldAmbariUser(services)
+        if old_ambari_user is not None:
+            putYarnPropertyAttribute("yarn.timeline-service.http-authentication.proxyuser.{0}.hosts".format(old_ambari_user), 'delete', 'true')
+            putYarnPropertyAttribute("yarn.timeline-service.http-authentication.proxyuser.{0}.groups".format(old_ambari_user), 'delete', 'true')
+            putYarnPropertyAttribute("yarn.timeline-service.http-authentication.proxyuser.{0}.users".format(old_ambari_user), 'delete', 'true')
+
   def recommendMapReduce2Configurations(self, configurations, clusterData, services, hosts):
     putMapredProperty = self.putProperty(configurations, "mapred-site", services)
     putMapredProperty('yarn.app.mapreduce.am.resource.mb', int(clusterData['amMemory']))
@@ -155,6 +168,37 @@ class HDP206StackAdvisor(DefaultStackAdvisor):
     putMapredProperty('mapreduce.reduce.java.opts', "-Xmx" + str(int(round(0.8 * clusterData['reduceMemory']))) + "m")
     putMapredProperty('mapreduce.task.io.sort.mb', min(int(round(0.4 * clusterData['mapMemory'])), 1024))
 
+  def getAmbariUser(self, services):
+    ambari_user = services['ambari-server-properties']['ambari-server.user']
+    if "cluster-env" in services["configurations"] \
+          and "ambari_principal_name" in services["configurations"]["cluster-env"]["properties"] \
+                and "security_enabled" in services["configurations"]["cluster-env"]["properties"] \
+                    and services["configurations"]["cluster-env"]["properties"]["security_enabled"].lower() == "true":
+      ambari_user = services["configurations"]["cluster-env"]["properties"]["ambari_principal_name"]
+      ambari_user = ambari_user.split('@')[0]
+    return ambari_user
+
+  def getOldAmbariUser(self, services):
+    ambari_user = None
+    if "cluster-env" in services["configurations"]:
+      if "security_enabled" in services["configurations"]["cluster-env"]["properties"] \
+              and services["configurations"]["cluster-env"]["properties"]["security_enabled"].lower() == "true":
+         ambari_user = services['ambari-server-properties']['ambari-server.user']
+      elif "ambari_principal_name" in services["configurations"]["cluster-env"]["properties"]:
+         ambari_user = services["configurations"]["cluster-env"]["properties"]["ambari_principal_name"]
+         ambari_user = ambari_user.split('@')[0]
+    return ambari_user
+
+  def recommendAmbariProxyUsersForHDFS(self, services, servicesList, putCoreSiteProperty, putCoreSitePropertyAttribute):
+      if "HDFS" in servicesList:
+          ambari_user = self.getAmbariUser(services)
+          putCoreSiteProperty("hadoop.proxyuser.{0}.hosts".format(ambari_user), "*")
+          putCoreSiteProperty("hadoop.proxyuser.{0}.groups".format(ambari_user), "*")
+          old_ambari_user = self.getOldAmbariUser(services)
+          if old_ambari_user is not None:
+            putCoreSitePropertyAttribute("hadoop.proxyuser.{0}.hosts".format(old_ambari_user), 'delete', 'true')
+            putCoreSitePropertyAttribute("hadoop.proxyuser.{0}.groups".format(old_ambari_user), 'delete', 'true')
+
   def recommendHadoopProxyUsers (self, configurations, services, hosts):
     servicesList = [service["StackServices"]["service_name"] for service in services["services"]]
     users = {}
@@ -233,6 +277,8 @@ class HDP206StackAdvisor(DefaultStackAdvisor):
         services["forced-configurations"].append({"type" : "core-site", "name" : "hadoop.proxyuser.{0}.hosts".format(user_name)})
         services["forced-configurations"].append({"type" : "core-site", "name" : "hadoop.proxyuser.{0}.groups".format(user_name)})
 
+    self.recommendAmbariProxyUsersForHDFS(services, servicesList, putCoreSiteProperty, putCoreSitePropertyAttribute)
+
   def recommendHDFSConfigurations(self, configurations, clusterData, services, hosts):
     putHDFSProperty = self.putProperty(configurations, "hadoop-env", services)
     putHDFSSiteProperty = self.putProperty(configurations, "hdfs-site", services)
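
Taken together, getAmbariUser() and getOldAmbariUser() let the advisor switch the proxyuser entries from the local ambari-server.user to the short name of ambari_principal_name once Kerberos is enabled, while marking the previous user's entries for deletion. A rough sketch of the advisor input and the expected core-site recommendation, using hypothetical values:

# Hypothetical advisor input after Kerberos has been enabled and the
# Ambari principal ambari-server-c1@EXAMPLE.COM has been created.
services = {
  "ambari-server-properties": {"ambari-server.user": "ambari"},
  "configurations": {
    "cluster-env": {
      "properties": {
        "security_enabled": "true",
        "ambari_principal_name": "ambari-server-c1@EXAMPLE.COM"
      }
    }
  }
}

# getAmbariUser(services)    -> "ambari-server-c1" (short name of the principal)
# getOldAmbariUser(services) -> "ambari"           (the local ambari-server.user)
#
# With HDFS among the cluster services, recommendAmbariProxyUsersForHDFS()
# would therefore recommend
#   hadoop.proxyuser.ambari-server-c1.hosts  = "*"
#   hadoop.proxyuser.ambari-server-c1.groups = "*"
# and mark hadoop.proxyuser.ambari.hosts / .groups with delete=true;
# the TEZ branch in recommendYARNConfigurations applies the same pattern to
# the yarn.timeline-service.http-authentication.proxyuser.* properties.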

http://git-wip-us.apache.org/repos/asf/ambari/blob/6c008c83/ambari-server/src/main/resources/stacks/HDP/2.1/services/stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.1/services/stack_advisor.py b/ambari-server/src/main/resources/stacks/HDP/2.1/services/stack_advisor.py
index 3a552b8..88892f2 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.1/services/stack_advisor.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.1/services/stack_advisor.py
@@ -93,6 +93,19 @@ class HDP21StackAdvisor(HDP206StackAdvisor):
         dbConnection = self.getDBConnectionString(hiveEnvProperties['hive_database']).format(hiveMSHost['Hosts']['host_name'], hiveSiteProperties['ambari.hive.db.schema.name'])
         putHiveProperty('javax.jdo.option.ConnectionURL', dbConnection)
 
+    servicesList = [service["StackServices"]["service_name"] for service in services["services"]]
+    if "PIG" in servicesList:
+        ambari_user = self.getAmbariUser(services)
+        webHcatSiteProperty = self.putProperty(configurations, "webhcat-site", services)
+        webHcatSiteProperty("webhcat.proxyuser.{0}.hosts".format(ambari_user), "*")
+        webHcatSiteProperty("webhcat.proxyuser.{0}.groups".format(ambari_user), "*")
+        old_ambari_user = self.getOldAmbariUser(services)
+        if old_ambari_user is not None:
+            webHcatSitePropertyAttributes = self.putPropertyAttribute(configurations, "webhcat-site", services)
+            webHcatSitePropertyAttributes("webhcat.proxyuser.{0}.hosts".format(old_ambari_user), 'delete', 'true')
+            webHcatSitePropertyAttributes("webhcat.proxyuser.{0}.groups".format(old_ambari_user), 'delete', 'true')
+
+
   def recommendTezConfigurations(self, configurations, clusterData, services, hosts):
     putTezProperty = self.putProperty(configurations, "tez-site")
     putTezProperty("tez.am.resource.memory.mb", int(clusterData['amMemory']))

http://git-wip-us.apache.org/repos/asf/ambari/blob/6c008c83/ambari-server/src/test/java/org/apache/ambari/server/controller/KerberosHelperTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/KerberosHelperTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/KerberosHelperTest.java
index f6027f3..0378a02 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/KerberosHelperTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/KerberosHelperTest.java
@@ -975,6 +975,7 @@ public class KerberosHelperTest extends EasyMockSupport {
     expect(kerberosEnvProperties.get("kdc_type")).andReturn(kdcType).anyTimes();
     expect(kerberosEnvProperties.get("manage_identities")).andReturn(manageIdentities).anyTimes();
     expect(kerberosEnvProperties.get("realm")).andReturn("FOOBAR.COM").anyTimes();
+    expect(kerberosEnvProperties.get("create_ambari_principal")).andReturn("false").anyTimes();
 
     final Config kerberosEnvConfig = createMock(Config.class);
     expect(kerberosEnvConfig.getProperties()).andReturn(kerberosEnvProperties).anyTimes();
@@ -1196,6 +1197,7 @@ public class KerberosHelperTest extends EasyMockSupport {
     expect(kerberosEnvProperties.get("kdc_type")).andReturn(kdcType).anyTimes();
     expect(kerberosEnvProperties.get("manage_identities")).andReturn(manageIdentities).anyTimes();
     expect(kerberosEnvProperties.get("realm")).andReturn("FOOBAR.COM").anyTimes();
+    expect(kerberosEnvProperties.get("create_ambari_principal")).andReturn("false").anyTimes();
 
     final Config kerberosEnvConfig = createMock(Config.class);
     expect(kerberosEnvConfig.getProperties()).andReturn(kerberosEnvProperties).anyTimes();
@@ -1627,6 +1629,7 @@ public class KerberosHelperTest extends EasyMockSupport {
     final Map<String, String> kerberosEnvProperties = createMock(Map.class);
     expect(kerberosEnvProperties.get("kdc_type")).andReturn("mit-kdc").anyTimes();
     expect(kerberosEnvProperties.get("realm")).andReturn("FOOBAR.COM").anyTimes();
+    expect(kerberosEnvProperties.get("create_ambari_principal")).andReturn("false").anyTimes();
 
     final Config kerberosEnvConfig = createMock(Config.class);
     expect(kerberosEnvConfig.getProperties()).andReturn(kerberosEnvProperties).anyTimes();
@@ -2052,6 +2055,7 @@ public class KerberosHelperTest extends EasyMockSupport {
         put("kdc_type", "mit-kdc");
         put("realm", "FOOBAR.COM");
         put("case_insensitive_username_rules", "false");
+        put("create_ambari_principal", "false");
       }
     };
 
@@ -2236,16 +2240,17 @@ public class KerberosHelperTest extends EasyMockSupport {
     expect(metaInfo.getKerberosDescriptor("HDP", "2.2")).andReturn(kerberosDescriptor).atLeastOnce();
     expect(clusterController.ensureResourceProvider(Resource.Type.Artifact)).andReturn(artifactResourceProvider).atLeastOnce();
 
-    RecommendationResponse.BlueprintConfigurations coreSiteRecommendation = createMock(RecommendationResponse.BlueprintConfigurations.class);
+    RecommendationResponse.BlueprintConfigurations coreSiteRecommendation = createNiceMock(RecommendationResponse
+      .BlueprintConfigurations.class);
     expect(coreSiteRecommendation.getProperties()).andReturn(Collections.singletonMap("newPropertyRecommendation", "newPropertyRecommendation"));
 
-    RecommendationResponse.BlueprintConfigurations newTypeRecommendation = createMock(RecommendationResponse.BlueprintConfigurations.class);
+    RecommendationResponse.BlueprintConfigurations newTypeRecommendation = createNiceMock(RecommendationResponse.BlueprintConfigurations.class);
     expect(newTypeRecommendation.getProperties()).andReturn(Collections.singletonMap("newTypeRecommendation", "newTypeRecommendation"));
 
-    RecommendationResponse.BlueprintConfigurations type1Recommendation = createMock(RecommendationResponse.BlueprintConfigurations.class);
+    RecommendationResponse.BlueprintConfigurations type1Recommendation = createNiceMock(RecommendationResponse.BlueprintConfigurations.class);
     expect(type1Recommendation.getProperties()).andReturn(Collections.singletonMap("replacement1", "not replaced"));
 
-    RecommendationResponse.BlueprintConfigurations service1SiteRecommendation = createMock(RecommendationResponse.BlueprintConfigurations.class);
+    RecommendationResponse.BlueprintConfigurations service1SiteRecommendation = createNiceMock(RecommendationResponse.BlueprintConfigurations.class);
     expect(service1SiteRecommendation.getProperties()).andReturn(Collections.singletonMap("component1b.property", "replaced value"));
 
     Map<String, RecommendationResponse.BlueprintConfigurations> configurations = new HashMap<String, RecommendationResponse.BlueprintConfigurations>();
@@ -2439,7 +2444,8 @@ public class KerberosHelperTest extends EasyMockSupport {
             put("kdc_type", "mit-kdc");
             put("realm", "FOOBAR.COM");
             put("case_insensitive_username_rules", "false");
-          }
+            put("create_ambari_principal", "false");
+        }
         });
         put("", new HashMap<String, String>() {
           {
@@ -2483,6 +2489,7 @@ public class KerberosHelperTest extends EasyMockSupport {
     propertiesKerberosEnv.put("password_min_digits", "1");
     propertiesKerberosEnv.put("password_min_punctuation", "0");
     propertiesKerberosEnv.put("password_min_whitespace","0");
+    propertiesKerberosEnv.put("create_ambari_principal", "false");
 
     Config configKrb5Conf = createMock(Config.class);
     expect(configKrb5Conf.getProperties()).andReturn(propertiesKrb5Conf).times(1);