Posted to commits@ambari.apache.org by rn...@apache.org on 2015/10/28 22:58:56 UTC

[2/3] ambari git commit: AMBARI-13431. Blueprints Configuration to select Kerberos. (Sandor Magyari via rnettleton)

http://git-wip-us.apache.org/repos/asf/ambari/blob/2337e539/ambari-server/src/main/java/org/apache/ambari/server/topology/TopologyManager.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/topology/TopologyManager.java b/ambari-server/src/main/java/org/apache/ambari/server/topology/TopologyManager.java
index 64be609..26d8c3e 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/topology/TopologyManager.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/topology/TopologyManager.java
@@ -7,7 +7,7 @@
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
  *
- *     http://www.apache.org/licenses/LICENSE-2.0
+ * http://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
@@ -18,32 +18,44 @@
 
 package org.apache.ambari.server.topology;
 
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Iterator;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Map;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
-
+import com.google.inject.Singleton;
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.actionmanager.HostRoleCommand;
 import org.apache.ambari.server.actionmanager.Request;
 import org.apache.ambari.server.controller.RequestStatusResponse;
+import org.apache.ambari.server.controller.internal.ArtifactResourceProvider;
+import org.apache.ambari.server.controller.internal.CredentialResourceProvider;
 import org.apache.ambari.server.controller.internal.ProvisionClusterRequest;
+import org.apache.ambari.server.controller.internal.RequestImpl;
 import org.apache.ambari.server.controller.internal.ScaleClusterRequest;
 import org.apache.ambari.server.controller.internal.Stack;
+import org.apache.ambari.server.controller.spi.NoSuchParentResourceException;
+import org.apache.ambari.server.controller.spi.RequestStatus;
+import org.apache.ambari.server.controller.spi.Resource;
+import org.apache.ambari.server.controller.spi.ResourceAlreadyExistsException;
+import org.apache.ambari.server.controller.spi.ResourceProvider;
+import org.apache.ambari.server.controller.spi.SystemException;
+import org.apache.ambari.server.controller.spi.UnsupportedPropertyException;
 import org.apache.ambari.server.orm.dao.HostRoleCommandStatusSummaryDTO;
 import org.apache.ambari.server.orm.entities.StageEntity;
+import org.apache.ambari.server.security.encryption.CredentialStoreService;
+import org.apache.ambari.server.state.SecurityType;
 import org.apache.ambari.server.state.host.HostImpl;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.google.inject.Singleton;
+import javax.inject.Inject;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
 
 /**
  * Manages all cluster provisioning actions on the cluster topology.
@@ -54,6 +66,7 @@ public class TopologyManager {
 
   public static final String INITIAL_CONFIG_TAG = "INITIAL";
   public static final String TOPOLOGY_RESOLVED_TAG = "TOPOLOGY_RESOLVED";
+  public static final String KDC_ADMIN_CREDENTIAL = "kdc.admin.credential";
 
   private PersistedState persistedState;
   private ExecutorService executor = Executors.newSingleThreadExecutor();
@@ -72,6 +85,14 @@ public class TopologyManager {
 
   private final Object initializationLock = new Object();
 
+
+  @Inject
+  private SecurityConfigurationFactory securityConfigurationFactory;
+
+  @Inject
+  private CredentialStoreService credentialStoreService;
+
+
   /**
    * A boolean not cached thread-local (volatile) to prevent double-checked
    * locking on the synchronized keyword.
@@ -93,6 +114,7 @@ public class TopologyManager {
           replayRequests(persistedState.getAllRequests());
           isInitialized = true;
         }
+
       }
     }
   }
@@ -100,11 +122,28 @@ public class TopologyManager {
   public RequestStatusResponse provisionCluster(ProvisionClusterRequest request) throws InvalidTopologyException, AmbariException {
     ensureInitialized();
     ClusterTopology topology = new ClusterTopologyImpl(ambariContext, request);
-    String clusterName = request.getClusterName();
+    final String clusterName = request.getClusterName();
 
     // get the id prior to creating ambari resources which increments the counter
     Long provisionId = ambariContext.getNextRequestId();
-    ambariContext.createAmbariResources(topology, clusterName);
+
+    SecurityConfiguration securityConfiguration = processSecurityConfiguration(request);
+    if (securityConfiguration != null && securityConfiguration.getType() == SecurityType.KERBEROS) {
+
+      addKerberosClient(topology);
+
+      // create the Cluster resource with security_type = KERBEROS; this triggers Kerberization of the
+      // cluster when the host install tasks execute
+      ambariContext.createAmbariResources(topology, clusterName, securityConfiguration.getType());
+      submitKerberosDescriptorAsArtifact(clusterName, securityConfiguration.getDescriptor());
+      Credential credential = request.getCredentialsMap().get(KDC_ADMIN_CREDENTIAL);
+      if (credential == null) {
+        throw new InvalidTopologyException(KDC_ADMIN_CREDENTIAL + " is missing from the request.");
+      }
+      submitCredential(clusterName, credential);
+    } else {
+      ambariContext.createAmbariResources(topology, clusterName, null);
+    }
 
     long clusterId = ambariContext.getClusterId(clusterName);
     topology.setClusterId(clusterId);
@@ -112,19 +151,109 @@ public class TopologyManager {
     // persist request after it has successfully validated
     PersistedTopologyRequest persistedRequest = persistedState.persistTopologyRequest(request);
 
-
     clusterTopologyMap.put(clusterId, topology);
 
     addClusterConfigRequest(topology, new ClusterConfigurationRequest(ambariContext, topology, true));
+
+    final Stack stack = topology.getBlueprint().getStack();
+
     LogicalRequest logicalRequest = processRequest(persistedRequest, topology, provisionId);
 
     //todo: this should be invoked as part of a generic lifecycle event which could possibly
     //todo: be tied to cluster state
-    Stack stack = topology.getBlueprint().getStack();
+
     ambariContext.persistInstallStateForUI(clusterName, stack.getName(), stack.getVersion());
     return getRequestStatus(logicalRequest.getRequestId());
   }
 
+  private void submitCredential(String clusterName, Credential credential) {
+
+    ResourceProvider provider =
+        ambariContext.getClusterController().ensureResourceProvider(Resource.Type.Credential);
+
+    Map<String, Object> properties = new HashMap<>();
+    properties.put(CredentialResourceProvider.CREDENTIAL_CLUSTER_NAME_PROPERTY_ID, clusterName);
+    properties.put(CredentialResourceProvider.CREDENTIAL_ALIAS_PROPERTY_ID, KDC_ADMIN_CREDENTIAL);
+    properties.put(CredentialResourceProvider.CREDENTIAL_PRINCIPAL_PROPERTY_ID, credential.getPrincipal());
+    properties.put(CredentialResourceProvider.CREDENTIAL_KEY_PROPERTY_ID, credential.getKey());
+    properties.put(CredentialResourceProvider.CREDENTIAL_TYPE_PROPERTY_ID, credential.getType().name());
+
+    org.apache.ambari.server.controller.spi.Request request = new RequestImpl(Collections.<String>emptySet(),
+        Collections.singleton(properties), Collections.<String, String>emptyMap(), null);
+
+    try {
+      RequestStatus status = provider.createResources(request);
+      if (status.getStatus() != RequestStatus.Status.Complete) {
+        throw new RuntimeException("Failed to add " + KDC_ADMIN_CREDENTIAL + " credential to cluster!");
+      }
+    } catch (SystemException | UnsupportedPropertyException | NoSuchParentResourceException e) {
+      throw new RuntimeException("Failed to add " + KDC_ADMIN_CREDENTIAL + " credential to cluster.", e);
+    } catch (ResourceAlreadyExistsException e) {
+      throw new RuntimeException("Failed to add " + KDC_ADMIN_CREDENTIAL + " credential to cluster as the credential already exists.");
+    }
+
+  }
+
+  /**
+   * Retrieve the security configuration from the Cluster Template request, falling back to the Blueprint if missing.
+   *
+   * @param request the provision cluster (Cluster Template) request
+   * @return the security configuration to apply, or null if none is defined
+   */
+  private SecurityConfiguration processSecurityConfiguration(ProvisionClusterRequest request) {
+    LOG.debug("Getting security configuration from the request ...");
+    SecurityConfiguration securityConfiguration = request.getSecurityConfiguration();
+
+    if (securityConfiguration == null) {
+      // todo - perform this logic at request creation instead!
+      LOG.debug("There's no security configuration in the request, retrieving it from the associated blueprint");
+      securityConfiguration = request.getBlueprint().getSecurity();
+      if (securityConfiguration.getType() == SecurityType.KERBEROS) {
+        securityConfiguration = securityConfigurationFactory.loadSecurityConfigurationByReference
+          (securityConfiguration.getDescriptorReference());
+      }
+    }
+    return securityConfiguration;
+  }
+
+  private void submitKerberosDescriptorAsArtifact(String clusterName, String descriptor) {
+
+    ResourceProvider artifactProvider =
+        ambariContext.getClusterController().ensureResourceProvider(Resource.Type.Artifact);
+
+    Map<String, Object> properties = new HashMap<>();
+    properties.put(ArtifactResourceProvider.ARTIFACT_NAME_PROPERTY, "kerberos_descriptor");
+    properties.put("Artifacts/cluster_name", clusterName);
+
+    Map<String, String> requestInfoProps = new HashMap<>();
+    requestInfoProps.put(org.apache.ambari.server.controller.spi.Request.REQUEST_INFO_BODY_PROPERTY,
+        "{\"" + ArtifactResourceProvider.ARTIFACT_DATA_PROPERTY + "\": " + descriptor + "}");
+
+    org.apache.ambari.server.controller.spi.Request request = new RequestImpl(Collections.<String>emptySet(),
+        Collections.singleton(properties), requestInfoProps, null);
+
+    try {
+      RequestStatus status = artifactProvider.createResources(request);
+      try {
+        while (status.getStatus() != RequestStatus.Status.Complete) {
+          LOG.info("Waiting for kerberos_descriptor artifact creation.");
+          Thread.sleep(100);
+        }
+      } catch (InterruptedException e) {
+        LOG.info("Wait for resource creation interrupted!");
+      }
+
+      if (status.getStatus() != RequestStatus.Status.Complete) {
+        throw new RuntimeException("Failed to attach kerberos_descriptor artifact to cluster!");
+      }
+    } catch (SystemException | UnsupportedPropertyException | NoSuchParentResourceException e) {
+      throw new RuntimeException("Failed to attach kerberos_descriptor artifact to cluster: " + e);
+    } catch (ResourceAlreadyExistsException e) {
+      throw new RuntimeException("Failed to attach kerberos_descriptor artifact to cluster as resource already exists.");
+    }
+
+  }
+
   public RequestStatusResponse scaleHosts(ScaleClusterRequest request)
       throws InvalidTopologyException, AmbariException {
 
@@ -135,10 +264,10 @@ public class TopologyManager {
     ClusterTopology topology = clusterTopologyMap.get(clusterId);
     if (topology == null) {
       throw new InvalidTopologyException("Unable to retrieve cluster topology for cluster. This is most likely a " +
-                                         "result of trying to scale a cluster via the API which was created using " +
-                                         "the Ambari UI. At this time only clusters created via the API using a " +
-                                         "blueprint can be scaled with this API.  If the cluster was originally created " +
-                                         "via the API as described above, please file a Jira for this matter.");
+          "result of trying to scale a cluster via the API which was created using " +
+          "the Ambari UI. At this time only clusters created via the API using a " +
+          "blueprint can be scaled with this API.  If the cluster was originally created " +
+          "via the API as described above, please file a Jira for this matter.");
     }
     request.setClusterId(clusterId);
     PersistedTopologyRequest persistedRequest = persistedState.persistTopologyRequest(request);
@@ -161,7 +290,7 @@ public class TopologyManager {
     // The lock ordering in this method must always be the same ordering as TopologyManager.processRequest
     // TODO: Locking strategies for TopologyManager should be reviewed and possibly rewritten in a future release
     synchronized (availableHosts) {
-      synchronized(reservedHosts) {
+      synchronized (reservedHosts) {
         if (reservedHosts.containsKey(hostName)) {
           LogicalRequest request = reservedHosts.remove(hostName);
           HostOfferResponse response = request.offer(host);
@@ -329,30 +458,30 @@ public class TopologyManager {
 
     // The lock ordering in this method must always be the same ordering as TopologyManager.onHostRegistered
     // TODO: Locking strategies for TopologyManager should be reviewed and possibly rewritten in a future release
-    synchronized(availableHosts) {
+    synchronized (availableHosts) {
       Iterator<HostImpl> hostIterator = availableHosts.iterator();
-      while (! requestHostComplete && hostIterator.hasNext()) {
+      while (!requestHostComplete && hostIterator.hasNext()) {
         HostImpl host = hostIterator.next();
         synchronized (reservedHosts) {
           String hostname = host.getHostName();
-          if (reservedHosts.containsKey(hostname))  {
+          if (reservedHosts.containsKey(hostname)) {
             if (logicalRequest.equals(reservedHosts.get(hostname))) {
               // host is registered to this request, remove it from reserved map
               LOG.info("TopologyManager.processRequest: host name = {} is mapped to LogicalRequest ID = {} and will be removed from the reserved hosts.",
-                hostname, logicalRequest.getRequestId());
+                  hostname, logicalRequest.getRequestId());
               reservedHosts.remove(hostname);
             } else {
               // host is registered with another request, don't offer
               //todo: clean up logic
               LOG.info("TopologyManager.processRequest: host name = {} is registered with another request, and will not be offered to LogicalRequest ID = {}",
-                hostname, logicalRequest.getRequestId());
+                  hostname, logicalRequest.getRequestId());
               continue;
             }
           }
         }
 
         LOG.info("TopologyManager.processRequest: offering host name = {} to LogicalRequest ID = {}",
-          host.getHostName(), logicalRequest.getRequestId());
+            host.getHostName(), logicalRequest.getRequestId());
         HostOfferResponse response = logicalRequest.offer(host);
         switch (response.getAnswer()) {
           case ACCEPTED:
@@ -362,25 +491,25 @@ public class TopologyManager {
             //todo: may affect this behavior??
             hostIterator.remove();
             LOG.info("TopologyManager.processRequest: host name = {} was ACCEPTED by LogicalRequest ID = {} , host has been removed from available hosts.",
-              host.getHostName(), logicalRequest.getRequestId());
+                host.getHostName(), logicalRequest.getRequestId());
             processAcceptedHostOffer(getClusterTopology(logicalRequest.getClusterId()), response, host);
             break;
           case DECLINED_DONE:
             requestHostComplete = true;
             LOG.info("TopologyManager.processRequest: host name = {} was DECLINED_DONE by LogicalRequest ID = {}",
-              host.getHostName(), logicalRequest.getRequestId());
+                host.getHostName(), logicalRequest.getRequestId());
             break;
           case DECLINED_PREDICATE:
             LOG.info("TopologyManager.processRequest: host name = {} was DECLINED_PREDICATE by LogicalRequest ID = {}",
-              host.getHostName(), logicalRequest.getRequestId());
+                host.getHostName(), logicalRequest.getRequestId());
             break;
         }
       }
 
-      if (! requestHostComplete) {
+      if (!requestHostComplete) {
         // not all required hosts have been matched (see earlier comment regarding outstanding logical requests)
         LOG.info("TopologyManager.processRequest: not all required hosts have been matched, so adding LogicalRequest ID = {} to outstanding requests",
-          logicalRequest.getRequestId());
+            logicalRequest.getRequestId());
         synchronized (outstandingRequests) {
           outstandingRequests.add(logicalRequest);
         }
@@ -399,7 +528,7 @@ public class TopologyManager {
 
     allRequests.put(logicalRequest.getRequestId(), logicalRequest);
     LOG.info("TopologyManager.createLogicalRequest: created LogicalRequest with ID = {} and completed persistence of this request.",
-      logicalRequest.getRequestId());
+        logicalRequest.getRequestId());
     synchronized (reservedHosts) {
       for (String host : logicalRequest.getReservedHosts()) {
         reservedHosts.put(host, logicalRequest);
@@ -424,11 +553,11 @@ public class TopologyManager {
     persistedState.registerHostName(response.getHostRequestId(), hostName);
 
     LOG.info("TopologyManager.processAcceptedHostOffer: about to execute tasks for host = {}",
-      hostName);
+        hostName);
 
     for (TopologyTask task : response.getTasks()) {
       LOG.info("Processing accepted host offer for {} which responded {} and task {}",
-        hostName, response.getAnswer(), task.getType());
+          hostName, response.getAnswer(), task.getType());
 
       task.init(topology, ambariContext);
       executor.execute(task);
@@ -444,7 +573,7 @@ public class TopologyManager {
 
       for (LogicalRequest logicalRequest : requestEntry.getValue()) {
         allRequests.put(logicalRequest.getRequestId(), logicalRequest);
-        if (! logicalRequest.hasCompleted()) {
+        if (!logicalRequest.hasCompleted()) {
           outstandingRequests.add(logicalRequest);
           for (String reservedHost : logicalRequest.getReservedHosts()) {
             reservedHosts.put(reservedHost, logicalRequest);
@@ -466,9 +595,9 @@ public class TopologyManager {
         }
       }
 
-      if (! configChecked) {
+      if (!configChecked) {
         configChecked = true;
-        if (! ambariContext.doesConfigurationWithTagExist(topology.getClusterId(), TOPOLOGY_RESOLVED_TAG)) {
+        if (!ambariContext.doesConfigurationWithTagExist(topology.getClusterId(), TOPOLOGY_RESOLVED_TAG)) {
           LOG.info("TopologyManager.replayRequests: no config with TOPOLOGY_RESOLVED found, adding cluster config request");
           addClusterConfigRequest(topology, new ClusterConfigurationRequest(ambariContext, topology, false));
         }
@@ -478,7 +607,6 @@ public class TopologyManager {
 
   //todo: this should invoke a callback on each 'service' in the topology
   private void finalizeTopology(TopologyRequest request, ClusterTopology topology) {
-    addKerberosClientIfNecessary(topology);
   }
 
   private boolean isHostIgnored(String host) {
@@ -490,11 +618,9 @@ public class TopologyManager {
    *
    * @param topology  cluster topology
    */
-  private void addKerberosClientIfNecessary(ClusterTopology topology) {
-    if (topology.isClusterKerberosEnabled()) {
-      for (HostGroup group : topology.getBlueprint().getHostGroups().values()) {
-        group.addComponent("KERBEROS_CLIENT");
-      }
+  private void addKerberosClient(ClusterTopology topology) {
+    for (HostGroup group : topology.getBlueprint().getHostGroups().values()) {
+      group.addComponent("KERBEROS_CLIENT");
     }
   }
 
@@ -529,7 +655,7 @@ public class TopologyManager {
       boolean interrupted = false;
 
       Collection<String> requiredHostGroups = getTopologyRequiredHostGroups();
-      while (! completed && ! interrupted) {
+      while (!completed && !interrupted) {
         try {
           Thread.sleep(100);
         } catch (InterruptedException e) {
@@ -543,7 +669,7 @@ public class TopologyManager {
 
       LOG.info("TopologyManager.ConfigureClusterTask: All Required host groups are completed, Cluster Configuration can now begin");
 
-      if (! interrupted) {
+      if (!interrupted) {
         try {
           LOG.info("TopologyManager.ConfigureClusterTask: Setting Configuration on cluster");
           // sets updated configuration on topology and cluster
@@ -591,12 +717,12 @@ public class TopologyManager {
           configTopologyResolved = false;
           if (groupInfo != null) {
             LOG.info("TopologyManager.ConfigureClusterTask areHostGroupsResolved: host group name = {} requires {} hosts to be mapped, but only {} are available.",
-              groupInfo.getHostGroupName(), groupInfo.getRequestedHostCount(), groupInfo.getHostNames().size());
+                groupInfo.getHostGroupName(), groupInfo.getRequestedHostCount(), groupInfo.getHostNames().size());
           }
           break;
         } else {
           LOG.info("TopologyManager.ConfigureClusterTask areHostGroupsResolved: host group name = {} has been fully resolved, as all {} required hosts are mapped to {} physical hosts.",
-            groupInfo.getHostGroupName(), groupInfo.getRequestedHostCount(), groupInfo.getHostNames().size());
+              groupInfo.getHostGroupName(), groupInfo.getRequestedHostCount(), groupInfo.getHostNames().size());
         }
       }
       return configTopologyResolved;
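
For reference, the Kerberos path added to provisionCluster() above expects the cluster creation template to carry a "security" block and a "kdc.admin.credential" entry; when the credential is missing, the request now fails fast with an InvalidTopologyException rather than attempting Kerberization without KDC credentials. The sketch below is illustrative only: the field names follow the constants and test fixtures in this patch (KDC_ADMIN_CREDENTIAL, the credential principal/key/type properties, and the kerberos_descriptor_reference key), while the blueprint name, principal, key, and descriptor values are placeholders.

    {
      "blueprint" : "kerberized-blueprint",
      "security" : {
        "type" : "KERBEROS",
        "kerberos_descriptor_reference" : "kerb-descriptor"
      },
      "credentials" : [
        {
          "alias" : "kdc.admin.credential",
          "principal" : "admin/admin@EXAMPLE.COM",
          "key" : "kdc-admin-password",
          "type" : "TEMPORARY"
        }
      ],
      "host_groups" : [ ... ]
    }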

http://git-wip-us.apache.org/repos/asf/ambari/blob/2337e539/ambari-server/src/main/java/org/apache/ambari/server/topology/TopologyRequestFactory.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/topology/TopologyRequestFactory.java b/ambari-server/src/main/java/org/apache/ambari/server/topology/TopologyRequestFactory.java
index da0c9e1..49009ef 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/topology/TopologyRequestFactory.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/topology/TopologyRequestFactory.java
@@ -27,6 +27,7 @@ import java.util.Map;
  * Factory for creating topology requests.
  */
 public interface TopologyRequestFactory {
-  public ProvisionClusterRequest createProvisionClusterRequest(Map<String, Object> properties) throws InvalidTopologyTemplateException;
+
+  public ProvisionClusterRequest createProvisionClusterRequest(Map<String, Object> properties, SecurityConfiguration securityConfiguration) throws InvalidTopologyTemplateException;
   // todo: use to create other request types
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/2337e539/ambari-server/src/main/java/org/apache/ambari/server/topology/TopologyRequestFactoryImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/topology/TopologyRequestFactoryImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/topology/TopologyRequestFactoryImpl.java
index 8b23b44..71878a2 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/topology/TopologyRequestFactoryImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/topology/TopologyRequestFactoryImpl.java
@@ -28,7 +28,8 @@ import java.util.Map;
  */
 public class TopologyRequestFactoryImpl implements TopologyRequestFactory {
   @Override
-  public ProvisionClusterRequest createProvisionClusterRequest(Map<String, Object> properties) throws InvalidTopologyTemplateException {
-    return new ProvisionClusterRequest(properties);
+  public ProvisionClusterRequest createProvisionClusterRequest(Map<String, Object> properties, SecurityConfiguration securityConfiguration) throws InvalidTopologyTemplateException {
+    return new ProvisionClusterRequest(properties, securityConfiguration);
+
   }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/2337e539/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog213.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog213.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog213.java
index d85b508..4dc300e 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog213.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog213.java
@@ -113,6 +113,10 @@ public class UpgradeCatalog213 extends AbstractUpgradeCatalog {
   private static final String KERBEROS_DESCRIPTOR_NAME_COLUMN = "kerberos_descriptor_name";
   private static final String KERBEROS_DESCRIPTOR_COLUMN = "kerberos_descriptor";
 
+  private static final String BLUEPRINT_TABLE = "blueprint";
+  private static final String SECURITY_TYPE_COLUMN = "security_type";
+  private static final String SECURITY_DESCRIPTOR_REF_COLUMN = "security_descriptor_reference";
+
   /**
    * Logger.
    */
@@ -171,6 +175,7 @@ public class UpgradeCatalog213 extends AbstractUpgradeCatalog {
     dbAccessor.alterColumn(HOST_ROLE_COMMAND_TABLE, new DBColumnInfo(HOST_ID_COL, Long.class, null, null, true));
 
     addKerberosDescriptorTable();
+    executeBlueprintDDLUpdates();
   }
 
   protected void executeUpgradeDDLUpdates() throws AmbariException, SQLException {
@@ -186,9 +191,16 @@ public class UpgradeCatalog213 extends AbstractUpgradeCatalog {
     dbAccessor.createTable(KERBEROS_DESCRIPTOR_TABLE, columns, KERBEROS_DESCRIPTOR_NAME_COLUMN);
   }
 
-  /**
-   * {@inheritDoc}
-   */
+  private void executeBlueprintDDLUpdates() throws AmbariException, SQLException {
+    dbAccessor.addColumn(BLUEPRINT_TABLE, new DBAccessor.DBColumnInfo(SECURITY_TYPE_COLUMN,
+      String.class, 32, "NONE", false));
+    dbAccessor.addColumn(BLUEPRINT_TABLE, new DBAccessor.DBColumnInfo(SECURITY_DESCRIPTOR_REF_COLUMN,
+      String.class, null, null, true));
+  }
+
+  /**
+   * {@inheritDoc}
+   */
   @Override
   protected void executePreDMLUpdates() throws AmbariException, SQLException {
     // execute DDL updates

http://git-wip-us.apache.org/repos/asf/ambari/blob/2337e539/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
index 9330310..62d8054 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
@@ -397,6 +397,8 @@ CREATE TABLE requestschedulebatchrequest (
 CREATE TABLE blueprint (
   blueprint_name VARCHAR(255) NOT NULL,
   stack_id BIGINT NOT NULL,
+  security_type VARCHAR(32) NOT NULL DEFAULT 'NONE',
+  security_descriptor_reference VARCHAR(255),
   PRIMARY KEY(blueprint_name)
 );
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/2337e539/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
index 8b36f9e..fe024ce 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
@@ -388,6 +388,8 @@ CREATE TABLE requestschedulebatchrequest (
 CREATE TABLE blueprint (
   blueprint_name VARCHAR2(255) NOT NULL,
   stack_id NUMBER(19) NOT NULL,
+  security_type VARCHAR2(32) DEFAULT 'NONE' NOT NULL,
+  security_descriptor_reference VARCHAR(255),
   PRIMARY KEY(blueprint_name)
 );
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/2337e539/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
index 764396e..3ae65ee 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
@@ -389,6 +389,8 @@ CREATE TABLE requestschedulebatchrequest (
 
 CREATE TABLE blueprint (
   blueprint_name VARCHAR(255) NOT NULL,
+  security_type VARCHAR(32) NOT NULL DEFAULT 'NONE',
+  security_descriptor_reference VARCHAR(255),
   stack_id BIGINT NOT NULL,
   PRIMARY KEY(blueprint_name)
 );

http://git-wip-us.apache.org/repos/asf/ambari/blob/2337e539/ambari-server/src/main/resources/Ambari-DDL-Postgres-EMBEDDED-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-Postgres-EMBEDDED-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-Postgres-EMBEDDED-CREATE.sql
index dbca53e..c014443 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-Postgres-EMBEDDED-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-Postgres-EMBEDDED-CREATE.sql
@@ -437,6 +437,8 @@ GRANT ALL PRIVILEGES ON TABLE ambari.requestschedulebatchrequest TO :username;
 CREATE TABLE ambari.blueprint (
   blueprint_name VARCHAR(255) NOT NULL,
   stack_id BIGINT NOT NULL,
+  security_type VARCHAR(32) NOT NULL DEFAULT 'NONE',
+  security_descriptor_reference VARCHAR(255),
   PRIMARY KEY(blueprint_name)
 );
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/2337e539/ambari-server/src/main/resources/Ambari-DDL-SQLAnywhere-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-SQLAnywhere-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-SQLAnywhere-CREATE.sql
index 2f93825..bba17a5 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-SQLAnywhere-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-SQLAnywhere-CREATE.sql
@@ -386,6 +386,8 @@ CREATE TABLE requestschedulebatchrequest (
 CREATE TABLE blueprint (
   blueprint_name VARCHAR(255) NOT NULL,
   stack_id NUMERIC(19) NOT NULL,
+  security_type VARCHAR(32) NOT NULL DEFAULT 'NONE',
+  security_descriptor_reference VARCHAR(255),
   PRIMARY KEY(blueprint_name)
 );
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/2337e539/ambari-server/src/main/resources/Ambari-DDL-SQLServer-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-SQLServer-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-SQLServer-CREATE.sql
index ef90205..60938c3 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-SQLServer-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-SQLServer-CREATE.sql
@@ -444,6 +444,8 @@ CREATE TABLE requestschedulebatchrequest (
 CREATE TABLE blueprint (
   blueprint_name VARCHAR(255) NOT NULL,
   stack_id BIGINT NOT NULL,
+  security_type VARCHAR(32) NOT NULL DEFAULT 'NONE',
+  security_descriptor_reference VARCHAR(255),
   PRIMARY KEY CLUSTERED (blueprint_name)
   );
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/2337e539/ambari-server/src/main/resources/properties.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/properties.json b/ambari-server/src/main/resources/properties.json
index 82fb327..727bdc7 100644
--- a/ambari-server/src/main/resources/properties.json
+++ b/ambari-server/src/main/resources/properties.json
@@ -348,6 +348,7 @@
         "Blueprints/blueprint_name",
         "Blueprints/stack_name",
         "Blueprints/stack_version",
+        "Blueprints/security",
         "host_groups",
         "host_groups/components",
         "host_groups/cardinality",

http://git-wip-us.apache.org/repos/asf/ambari/blob/2337e539/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/AlertResourceProviderTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/AlertResourceProviderTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/AlertResourceProviderTest.java
index 71226a1..4f0263b 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/AlertResourceProviderTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/AlertResourceProviderTest.java
@@ -17,27 +17,11 @@
  */
 package org.apache.ambari.server.controller.internal;
 
-import static org.apache.ambari.server.configuration.Configuration.JDBC_IN_MEMORY_URL;
-import static org.apache.ambari.server.configuration.Configuration.JDBC_IN_MEMROY_DRIVER;
-import static org.easymock.EasyMock.capture;
-import static org.easymock.EasyMock.createMock;
-import static org.easymock.EasyMock.expect;
-import static org.easymock.EasyMock.replay;
-import static org.easymock.EasyMock.verify;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
-
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Set;
-import java.util.concurrent.atomic.AtomicInteger;
-import java.util.concurrent.atomic.AtomicLong;
-
-import javax.persistence.EntityManager;
-
+import com.google.inject.Binder;
+import com.google.inject.Guice;
+import com.google.inject.Injector;
+import com.google.inject.Module;
+import com.google.inject.util.Modules;
 import org.apache.ambari.server.api.query.render.AlertStateSummary;
 import org.apache.ambari.server.api.query.render.AlertSummaryGroupedRenderer;
 import org.apache.ambari.server.api.query.render.AlertSummaryGroupedRenderer.AlertDefinitionSummary;
@@ -73,11 +57,26 @@ import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
 
-import com.google.inject.Binder;
-import com.google.inject.Guice;
-import com.google.inject.Injector;
-import com.google.inject.Module;
-import com.google.inject.util.Modules;
+import javax.persistence.EntityManager;
+import java.io.File;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.concurrent.atomic.AtomicLong;
+
+import static org.apache.ambari.server.configuration.Configuration.JDBC_IN_MEMORY_URL;
+import static org.apache.ambari.server.configuration.Configuration.JDBC_IN_MEMROY_DRIVER;
+import static org.easymock.EasyMock.capture;
+import static org.easymock.EasyMock.createMock;
+import static org.easymock.EasyMock.expect;
+import static org.easymock.EasyMock.replay;
+import static org.easymock.EasyMock.verify;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
 
 /**
  * Test the AlertResourceProvider class
@@ -577,6 +576,9 @@ public class AlertResourceProviderTest {
       expect(configuration.getDatabaseUser()).andReturn("test").anyTimes();
       expect(configuration.getDatabasePassword()).andReturn("test").anyTimes();
       expect(configuration.getAlertEventPublisherPoolSize()).andReturn(Integer.valueOf(Configuration.ALERTS_EXECUTION_SCHEDULER_THREADS_DEFAULT)).anyTimes();
+      expect(configuration.getMasterKeyLocation()).andReturn(new File("/test")).anyTimes();
+      expect(configuration.getTemporaryKeyStoreRetentionMinutes()).andReturn(2L).anyTimes();
+      expect(configuration.isActivelyPurgeTemporaryKeyStore()).andReturn(true).anyTimes();
       replay(configuration);
     }
   }

http://git-wip-us.apache.org/repos/asf/ambari/blob/2337e539/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/BlueprintResourceProviderTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/BlueprintResourceProviderTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/BlueprintResourceProviderTest.java
index 5bfdebb..369bf02 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/BlueprintResourceProviderTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/BlueprintResourceProviderTest.java
@@ -18,32 +18,7 @@
 
 package org.apache.ambari.server.controller.internal;
 
-import static org.easymock.EasyMock.anyObject;
-import static org.easymock.EasyMock.createMock;
-import static org.easymock.EasyMock.createNiceMock;
-import static org.easymock.EasyMock.createStrictMock;
-import static org.easymock.EasyMock.expect;
-import static org.easymock.EasyMock.expectLastCall;
-import static org.easymock.EasyMock.replay;
-import static org.easymock.EasyMock.reset;
-import static org.easymock.EasyMock.verify;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertNull;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
-
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
+import com.google.gson.Gson;
 import org.apache.ambari.server.api.services.AmbariMetaInfo;
 import org.apache.ambari.server.controller.AmbariManagementController;
 import org.apache.ambari.server.controller.internal.BlueprintResourceProvider.BlueprintConfigPopulationStrategy;
@@ -55,10 +30,10 @@ import org.apache.ambari.server.controller.spi.NoSuchResourceException;
 import org.apache.ambari.server.controller.spi.Predicate;
 import org.apache.ambari.server.controller.spi.Request;
 import org.apache.ambari.server.controller.spi.Resource;
+import org.apache.ambari.server.controller.spi.ResourceAlreadyExistsException;
 import org.apache.ambari.server.controller.spi.ResourceProvider;
 import org.apache.ambari.server.controller.spi.SystemException;
 import org.apache.ambari.server.controller.spi.UnsupportedPropertyException;
-import org.apache.ambari.server.controller.spi.ResourceAlreadyExistsException;
 import org.apache.ambari.server.controller.utilities.PropertyHelper;
 import org.apache.ambari.server.orm.dao.BlueprintDAO;
 import org.apache.ambari.server.orm.dao.StackDAO;
@@ -69,17 +44,45 @@ import org.apache.ambari.server.orm.entities.HostGroupComponentEntity;
 import org.apache.ambari.server.orm.entities.HostGroupConfigEntity;
 import org.apache.ambari.server.orm.entities.HostGroupEntity;
 import org.apache.ambari.server.orm.entities.StackEntity;
-import org.apache.ambari.server.utils.StageUtils;
+import org.apache.ambari.server.state.SecurityType;
 import org.apache.ambari.server.topology.Blueprint;
 import org.apache.ambari.server.topology.BlueprintFactory;
 import org.apache.ambari.server.topology.InvalidTopologyException;
+import org.apache.ambari.server.topology.SecurityConfiguration;
+import org.apache.ambari.server.topology.SecurityConfigurationFactory;
+import org.apache.ambari.server.utils.StageUtils;
 import org.junit.Before;
 import org.junit.BeforeClass;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.rules.ExpectedException;
 
-import com.google.gson.Gson;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import static org.easymock.EasyMock.anyBoolean;
+import static org.easymock.EasyMock.anyObject;
+import static org.easymock.EasyMock.createMock;
+import static org.easymock.EasyMock.createNiceMock;
+import static org.easymock.EasyMock.createStrictMock;
+import static org.easymock.EasyMock.expect;
+import static org.easymock.EasyMock.expectLastCall;
+import static org.easymock.EasyMock.replay;
+import static org.easymock.EasyMock.reset;
+import static org.easymock.EasyMock.verify;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
 
 /**
  * BlueprintResourceProvider unit tests.
@@ -98,13 +101,13 @@ public class BlueprintResourceProviderTest {
   private final static Blueprint blueprint = createMock(Blueprint.class);
   private final static AmbariMetaInfo metaInfo = createMock(AmbariMetaInfo.class);
   private final static BlueprintFactory blueprintFactory = createMock(BlueprintFactory.class);
+  private final static SecurityConfigurationFactory securityFactory = createMock(SecurityConfigurationFactory.class);
   private final static BlueprintResourceProvider provider = createProvider();
   private final static Gson gson = new Gson();
 
-
   @BeforeClass
   public static void initClass() {
-    BlueprintResourceProvider.init(blueprintFactory, dao, gson);
+    BlueprintResourceProvider.init(blueprintFactory, dao, securityFactory, gson);
 
     StackEntity stackEntity = new StackEntity();
     stackEntity.setStackName("test-stack-name");
@@ -112,15 +115,14 @@ public class BlueprintResourceProviderTest {
 
     expect(
         stackDAO.find(anyObject(String.class),
-            anyObject(String.class))).andReturn(stackEntity).anyTimes();
-
+          anyObject(String.class))).andReturn(stackEntity).anyTimes();
     replay(stackDAO);
 
   }
 
   @Before
   public void resetGlobalMocks() {
-    reset(dao, metaInfo, blueprintFactory, blueprint, entity);
+    reset(dao, metaInfo, blueprintFactory, securityFactory, blueprint, entity);
   }
 
   @Test
@@ -133,7 +135,8 @@ public class BlueprintResourceProviderTest {
     Map<String, String> requestInfoProperties = getTestRequestInfoProperties();
 
     // set expectations
-    expect(blueprintFactory.createBlueprint(setProperties.iterator().next())).andReturn(blueprint).once();
+    expect(blueprintFactory.createBlueprint(setProperties.iterator().next(), null)).andReturn(blueprint).once();
+    expect(securityFactory.createSecurityConfigurationFromRequest(null, true)).andReturn(null).anyTimes();
     blueprint.validateRequiredProperties();
     blueprint.validateTopology();
     expect(blueprint.toEntity()).andReturn(entity);
@@ -143,7 +146,7 @@ public class BlueprintResourceProviderTest {
     expect(dao.findByName(BLUEPRINT_NAME)).andReturn(null);
     dao.create(entity);
 
-    replay(dao, entity, metaInfo, blueprintFactory, blueprint, request, managementController);
+    replay(dao, entity, metaInfo, blueprintFactory, securityFactory, blueprint, request, managementController);
     // end expectations
 
     ResourceProvider provider = AbstractControllerResourceProvider.getResourceProvider(
@@ -164,7 +167,7 @@ public class BlueprintResourceProviderTest {
     assertEquals(request, lastEvent.getRequest());
     assertNull(lastEvent.getPredicate());
 
-    verify(dao, entity, blueprintFactory, metaInfo, request, managementController);
+    verify(dao, entity, blueprintFactory, securityFactory, metaInfo, request, managementController);
   }
 
   @Test()
@@ -211,7 +214,7 @@ public class BlueprintResourceProviderTest {
     requestInfoProperties.put("validate_topology", "false");
 
     // set expectations
-    expect(blueprintFactory.createBlueprint(setProperties.iterator().next())).andReturn(blueprint).once();
+    expect(blueprintFactory.createBlueprint(setProperties.iterator().next(), null)).andReturn(blueprint).once();
     blueprint.validateRequiredProperties();
     expect(blueprint.toEntity()).andReturn(entity);
     expect(blueprint.getName()).andReturn(BLUEPRINT_NAME).atLeastOnce();
@@ -252,7 +255,7 @@ public class BlueprintResourceProviderTest {
     Map<String, String> requestInfoProperties = getTestRequestInfoProperties();
 
     // set expectations
-    expect(blueprintFactory.createBlueprint(setProperties.iterator().next())).andReturn(blueprint).once();
+    expect(blueprintFactory.createBlueprint(setProperties.iterator().next(), null)).andReturn(blueprint).once();
     blueprint.validateRequiredProperties();
     expect(blueprint.getName()).andReturn(BLUEPRINT_NAME).atLeastOnce();
     blueprint.validateTopology();
@@ -295,7 +298,7 @@ public class BlueprintResourceProviderTest {
     Request request = createMock(Request.class);
 
     // set expectations
-    expect(blueprintFactory.createBlueprint(setProperties.iterator().next())).andReturn(blueprint).once();
+    expect(blueprintFactory.createBlueprint(setProperties.iterator().next(), null)).andReturn(blueprint).once();
     blueprint.validateRequiredProperties();
     blueprint.validateTopology();
     expect(blueprint.toEntity()).andReturn(entity);
@@ -340,12 +343,13 @@ public class BlueprintResourceProviderTest {
     Map<String, String> requestInfoProperties = getTestRequestInfoProperties();
 
     // set expectations
-    expect(blueprintFactory.createBlueprint(setProperties.iterator().next())).andThrow(
-        new IllegalArgumentException("Blueprint name must be provided"));
+    expect(blueprintFactory.createBlueprint(setProperties.iterator().next(), null)).andThrow(
+      new IllegalArgumentException("Blueprint name must be provided"));
+    expect(securityFactory.createSecurityConfigurationFromRequest(null, true)).andReturn(null).anyTimes();
     expect(request.getProperties()).andReturn(setProperties);
     expect(request.getRequestInfoProperties()).andReturn(requestInfoProperties);
 
-    replay(dao, entity, metaInfo, blueprintFactory, blueprint, request);
+    replay(dao, entity, metaInfo, blueprintFactory, securityFactory, blueprint, request);
     // end expectations
 
     try {
@@ -357,6 +361,51 @@ public class BlueprintResourceProviderTest {
     verify(dao, entity, blueprintFactory, metaInfo, request);
   }
 
+  @Test
+  public void testCreateResources_withSecurityConfiguration() throws Exception {
+    AmbariManagementController managementController = createMock(AmbariManagementController.class);
+    Request request = createMock(Request.class);
+
+    Set<Map<String, Object>> setProperties = getBlueprintTestProperties();
+    Map<String, String> requestInfoProperties = getTestRequestInfoProperties();
+    SecurityConfiguration securityConfiguration = new SecurityConfiguration(SecurityType.KERBEROS, "testRef", null);
+
+    // set expectations
+    expect(securityFactory.createSecurityConfigurationFromRequest(anyObject(HashMap.class), anyBoolean()))
+      .andReturn(securityConfiguration).once();
+    expect(blueprintFactory.createBlueprint(setProperties.iterator().next(), securityConfiguration)).andReturn(blueprint).once();
+    blueprint.validateRequiredProperties();
+    blueprint.validateTopology();
+    expect(blueprint.toEntity()).andReturn(entity);
+    expect(blueprint.getName()).andReturn(BLUEPRINT_NAME).atLeastOnce();
+    expect(request.getProperties()).andReturn(setProperties);
+    expect(request.getRequestInfoProperties()).andReturn(requestInfoProperties);
+    expect(dao.findByName(BLUEPRINT_NAME)).andReturn(null);
+    dao.create(entity);
+
+    replay(dao, entity, metaInfo, blueprintFactory, securityFactory, blueprint, request, managementController);
+    // end expectations
+
+    ResourceProvider provider = AbstractControllerResourceProvider.getResourceProvider(
+      Resource.Type.Blueprint,
+      PropertyHelper.getPropertyIds(Resource.Type.Blueprint),
+      PropertyHelper.getKeyPropertyIds(Resource.Type.Blueprint),
+      managementController);
+
+    AbstractResourceProviderTest.TestObserver observer = new AbstractResourceProviderTest.TestObserver();
+    ((ObservableResourceProvider)provider).addObserver(observer);
+
+    provider.createResources(request);
+
+    ResourceProviderEvent lastEvent = observer.getLastEvent();
+    assertNotNull(lastEvent);
+    assertEquals(Resource.Type.Blueprint, lastEvent.getResourceType());
+    assertEquals(ResourceProviderEvent.Type.Create, lastEvent.getType());
+    assertEquals(request, lastEvent.getRequest());
+    assertNull(lastEvent.getPredicate());
+
+    verify(dao, entity, blueprintFactory, metaInfo, request, managementController);
+  }
 
   @Test
   public void testGetResourcesNoPredicate() throws SystemException, UnsupportedPropertyException,
@@ -443,7 +492,7 @@ public class BlueprintResourceProviderTest {
     Request request = createMock(Request.class);
 
     // set expectations
-    expect(blueprintFactory.createBlueprint(setProperties.iterator().next())).andReturn(blueprint).once();
+    expect(blueprintFactory.createBlueprint(setProperties.iterator().next(), null)).andReturn(blueprint).once();
     blueprint.validateRequiredProperties();
     blueprint.validateTopology();
     expect(blueprint.toEntity()).andReturn(entity);
@@ -488,7 +537,7 @@ public class BlueprintResourceProviderTest {
     Request request = createMock(Request.class);
 
     // set expectations
-    expect(blueprintFactory.createBlueprint(setProperties.iterator().next())).andReturn(blueprint).once();
+    expect(blueprintFactory.createBlueprint(setProperties.iterator().next(), null)).andReturn(blueprint).once();
     blueprint.validateRequiredProperties();
     blueprint.validateTopology();
     expect(blueprint.toEntity()).andReturn(entity);
@@ -648,6 +697,10 @@ public class BlueprintResourceProviderTest {
     return Collections.singleton(mapProperties);
   }
 
+  public static Map<String, Object> getBlueprintRawBodyProperties() {
+    return new HashMap<String, Object>();
+  }
+
   public static void setConfigurationProperties(Set<Map<String, Object>> properties ) {
     Map<String, String> clusterProperties = new HashMap<String, String>();
     clusterProperties.put("core-site/properties/fs.trash.interval", "480");
@@ -733,6 +786,7 @@ public class BlueprintResourceProviderTest {
       assertEquals(1, finalAttrs.size());
       assertEquals("true", finalAttrs.get("ipc.client.idlethreshold"));
     }
+
   }
 
   private static BlueprintResourceProvider createProvider() {
@@ -794,7 +848,6 @@ public class BlueprintResourceProviderTest {
     return setPropertiesInfo;
   }
 
-
   @Test
   public void testPopulateConfigurationEntity_oldSchema() throws Exception {
     Map<String, String> configuration = new HashMap<String, String>();

http://git-wip-us.apache.org/repos/asf/ambari/blob/2337e539/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ClusterResourceProviderTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ClusterResourceProviderTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ClusterResourceProviderTest.java
index 0b2e905..f885a5b 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ClusterResourceProviderTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ClusterResourceProviderTest.java
@@ -18,27 +18,7 @@
 
 package org.apache.ambari.server.controller.internal;
 
-import static org.easymock.EasyMock.anyObject;
-import static org.easymock.EasyMock.capture;
-import static org.easymock.EasyMock.createMock;
-import static org.easymock.EasyMock.createNiceMock;
-import static org.easymock.EasyMock.createStrictMock;
-import static org.easymock.EasyMock.eq;
-import static org.easymock.EasyMock.expect;
-import static org.easymock.EasyMock.replay;
-import static org.easymock.EasyMock.reset;
-import static org.easymock.EasyMock.verify;
-import static org.junit.Assert.assertEquals;
-
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.LinkedHashMap;
-import java.util.LinkedHashSet;
-import java.util.Map;
-import java.util.Set;
+import com.google.gson.Gson;
 import org.apache.ambari.server.controller.AmbariManagementController;
 import org.apache.ambari.server.controller.ClusterRequest;
 import org.apache.ambari.server.controller.ClusterResponse;
@@ -56,8 +36,9 @@ import org.apache.ambari.server.state.State;
 import org.apache.ambari.server.topology.Blueprint;
 import org.apache.ambari.server.topology.BlueprintFactory;
 import org.apache.ambari.server.topology.InvalidTopologyException;
+import org.apache.ambari.server.topology.SecurityConfiguration;
+import org.apache.ambari.server.topology.SecurityConfigurationFactory;
 import org.apache.ambari.server.topology.TopologyManager;
-import org.apache.ambari.server.topology.TopologyRequest;
 import org.apache.ambari.server.topology.TopologyRequestFactory;
 import org.easymock.Capture;
 import org.easymock.EasyMock;
@@ -66,6 +47,29 @@ import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
 
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.LinkedHashMap;
+import java.util.LinkedHashSet;
+import java.util.Map;
+import java.util.Set;
+
+import static org.easymock.EasyMock.anyBoolean;
+import static org.easymock.EasyMock.anyObject;
+import static org.easymock.EasyMock.capture;
+import static org.easymock.EasyMock.createMock;
+import static org.easymock.EasyMock.createNiceMock;
+import static org.easymock.EasyMock.createStrictMock;
+import static org.easymock.EasyMock.eq;
+import static org.easymock.EasyMock.expect;
+import static org.easymock.EasyMock.replay;
+import static org.easymock.EasyMock.reset;
+import static org.easymock.EasyMock.verify;
+import static org.junit.Assert.assertEquals;
+
 
 /**
  * ClusterResourceProvider tests.
@@ -80,41 +84,108 @@ public class ClusterResourceProviderTest {
   private static final Request request = createNiceMock(Request.class);
   private static final TopologyManager topologyManager = createStrictMock(TopologyManager.class);
   private static final TopologyRequestFactory topologyFactory = createStrictMock(TopologyRequestFactory.class);
+  private final static SecurityConfigurationFactory securityFactory = createMock(SecurityConfigurationFactory.class);
   private static final ProvisionClusterRequest topologyRequest = createNiceMock(ProvisionClusterRequest.class);
   private static final BlueprintFactory blueprintFactory = createStrictMock(BlueprintFactory.class);
   private static final Blueprint blueprint = createNiceMock(Blueprint.class);
   private static final RequestStatusResponse requestStatusResponse = createNiceMock(RequestStatusResponse.class);
+  private static final Gson gson = new Gson();
 
   @Before
   public void setup() throws Exception{
-    ClusterResourceProvider.init(topologyManager, topologyFactory);
+    ClusterResourceProvider.init(topologyManager, topologyFactory, securityFactory, gson);
     ProvisionClusterRequest.init(blueprintFactory);
     provider = new ClusterResourceProvider(controller);
 
     expect(blueprintFactory.getBlueprint(BLUEPRINT_NAME)).andReturn(blueprint).anyTimes();
+    expect(securityFactory.createSecurityConfigurationFromRequest(null, false)).andReturn(null).anyTimes();
   }
 
   @After
   public void tearDown() {
-    reset(request, topologyManager, topologyFactory, topologyRequest, blueprintFactory, requestStatusResponse, blueprint);
+    reset(request, topologyManager, topologyFactory, topologyRequest, blueprintFactory, securityFactory,
+      requestStatusResponse, blueprint);
   }
 
   private void replayAll() {
-    replay(request, topologyManager, topologyFactory, topologyRequest, blueprintFactory, requestStatusResponse, blueprint);
+    replay(request, topologyManager, topologyFactory, topologyRequest, blueprintFactory, securityFactory,
+      requestStatusResponse, blueprint);
   }
 
   private void verifyAll() {
-    verify(request, topologyManager, topologyFactory, topologyRequest, blueprintFactory, requestStatusResponse, blueprint);
+    verify(request, topologyManager, topologyFactory, topologyRequest, blueprintFactory, securityFactory,
+      requestStatusResponse, blueprint);
   }
 
   @Test
   public void testCreateResource_blueprint() throws Exception {
     Set<Map<String, Object>> requestProperties = createBlueprintRequestProperties(CLUSTER_NAME, BLUEPRINT_NAME);
     Map<String, Object> properties = requestProperties.iterator().next();
+    Map<String, String> requestInfoProperties = new HashMap<String, String>();
+    requestInfoProperties.put(Request.REQUEST_INFO_BODY_PROPERTY, "{}");
 
     // set expectations
     expect(request.getProperties()).andReturn(requestProperties).anyTimes();
-    expect(topologyFactory.createProvisionClusterRequest(properties)).andReturn(topologyRequest).once();
+    expect(request.getRequestInfoProperties()).andReturn(requestInfoProperties).anyTimes();
+
+    expect(securityFactory.createSecurityConfigurationFromRequest(anyObject(HashMap.class), anyBoolean()))
+      .andReturn(null).once();
+    expect(topologyFactory.createProvisionClusterRequest(properties, null)).andReturn(topologyRequest).once();
+    expect(topologyManager.provisionCluster(topologyRequest)).andReturn(requestStatusResponse).once();
+    expect(requestStatusResponse.getRequestId()).andReturn(5150L).anyTimes();
+
+    replayAll();
+    RequestStatus requestStatus = provider.createResources(request);
+    assertEquals(5150L, requestStatus.getRequestResource().getPropertyValue(PropertyHelper.getPropertyId("Requests", "id")));
+    assertEquals(Resource.Type.Request, requestStatus.getRequestResource().getType());
+    assertEquals("Accepted", requestStatus.getRequestResource().getPropertyValue(PropertyHelper.getPropertyId("Requests", "status")));
+
+    verifyAll();
+  }
+
+  @Test(expected = IllegalArgumentException.class)
+  public void testCreateResource_blueprint_withInvalidSecurityConfiguration() throws Exception {
+    Set<Map<String, Object>> requestProperties = createBlueprintRequestProperties(CLUSTER_NAME, BLUEPRINT_NAME);
+    Map<String, Object> properties = requestProperties.iterator().next();
+    Map<String, String> requestInfoProperties = new HashMap<String, String>();
+    requestInfoProperties.put(Request.REQUEST_INFO_BODY_PROPERTY, "{\"security\" : {\n\"type\" : \"NONE\"," +
+      "\n\"kerberos_descriptor_reference\" : " + "\"testRef\"\n}}");
+    SecurityConfiguration blueprintSecurityConfiguration = new SecurityConfiguration(SecurityType.KERBEROS, "testRef",
+      null);
+    SecurityConfiguration securityConfiguration = new SecurityConfiguration(SecurityType.NONE, null, null);
+
+    // set expectations
+    expect(request.getProperties()).andReturn(requestProperties).anyTimes();
+    expect(request.getRequestInfoProperties()).andReturn(requestInfoProperties).anyTimes();
+
+    expect(securityFactory.createSecurityConfigurationFromRequest(anyObject(HashMap.class), anyBoolean()))
+      .andReturn(securityConfiguration).once();
+    expect(topologyFactory.createProvisionClusterRequest(properties, securityConfiguration)).andReturn(topologyRequest).once();
+    expect(topologyRequest.getBlueprint()).andReturn(blueprint).anyTimes();
+    expect(blueprint.getSecurity()).andReturn(blueprintSecurityConfiguration).anyTimes();
+    expect(requestStatusResponse.getRequestId()).andReturn(5150L).anyTimes();
+
+    replayAll();
+    provider.createResources(request);
+  }
+
+  @Test
+  public void testCreateResource_blueprint_withSecurityConfiguration() throws Exception {
+    Set<Map<String, Object>> requestProperties = createBlueprintRequestProperties(CLUSTER_NAME, BLUEPRINT_NAME);
+    Map<String, Object> properties = requestProperties.iterator().next();
+    SecurityConfiguration securityConfiguration = new SecurityConfiguration(SecurityType.KERBEROS, "testRef", null);
+
+    Map<String, String> requestInfoProperties = new HashMap<String, String>();
+    requestInfoProperties.put(Request.REQUEST_INFO_BODY_PROPERTY, "{\"security\" : {\n\"type\" : \"KERBEROS\",\n\"kerberos_descriptor_reference\" : " +
+      "\"testRef\"\n}}");
+
+    // set expectations
+    expect(request.getProperties()).andReturn(requestProperties).anyTimes();
+    expect(request.getRequestInfoProperties()).andReturn(requestInfoProperties).anyTimes();
+
+    expect(topologyFactory.createProvisionClusterRequest(properties, securityConfiguration)).andReturn(topologyRequest).once();
+    expect(securityFactory.createSecurityConfigurationFromRequest(anyObject(HashMap.class), anyBoolean()))
+      .andReturn(securityConfiguration).once();
     expect(topologyManager.provisionCluster(topologyRequest)).andReturn(requestStatusResponse).once();
     expect(requestStatusResponse.getRequestId()).andReturn(5150L).anyTimes();
 
@@ -135,7 +206,8 @@ public class ClusterResourceProviderTest {
     // set expectations
     expect(request.getProperties()).andReturn(requestProperties).anyTimes();
     // throw exception from topology request factory and assert that the correct exception is thrown from resource provider
-    expect(topologyFactory.createProvisionClusterRequest(properties)).andThrow(new InvalidTopologyException("test"));
+    expect(topologyFactory.createProvisionClusterRequest(properties, null))
+      .andThrow(new InvalidTopologyException("test"));
 
     replayAll();
     provider.createResources(request);

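For readability, the escaped request_info body driven through Request.REQUEST_INFO_BODY_PROPERTY in the Kerberos test above corresponds to JSON along these lines (the field names and the "testRef" value are the test fixtures, not a recommended descriptor reference):

    {
      "security" : {
        "type" : "KERBEROS",
        "kerberos_descriptor_reference" : "testRef"
      }
    }

The invalid-configuration test sends the same block with "type" : "NONE" against a blueprint whose SecurityConfiguration is KERBEROS/"testRef", and expects createResources() to reject the mismatch with an IllegalArgumentException.
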
http://git-wip-us.apache.org/repos/asf/ambari/blob/2337e539/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ProvisionClusterRequestTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ProvisionClusterRequestTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ProvisionClusterRequestTest.java
index 21aafc5..5ace2a5 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ProvisionClusterRequestTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ProvisionClusterRequestTest.java
@@ -36,9 +36,11 @@ import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.HashMap;
+import java.util.HashSet;
 import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.Set;
 
 import static org.easymock.EasyMock.createMock;
 import static org.easymock.EasyMock.createNiceMock;
@@ -97,9 +99,9 @@ public class ProvisionClusterRequestTest {
     // reset host resource provider expectations to none since we are not specifying a host predicate
     reset(hostResourceProvider);
     replay(hostResourceProvider);
-
     Map<String, Object> properties = createBlueprintRequestPropertiesNameOnly(CLUSTER_NAME, BLUEPRINT_NAME);
-    ProvisionClusterRequest provisionClusterRequest = new ProvisionClusterRequest(properties);
+
+    ProvisionClusterRequest provisionClusterRequest = new ProvisionClusterRequest(properties, null);
 
     assertEquals(CLUSTER_NAME, provisionClusterRequest.getClusterName());
     assertEquals(TopologyRequest.Type.PROVISION, provisionClusterRequest.getType());
@@ -149,9 +151,9 @@ public class ProvisionClusterRequestTest {
     // reset host resource provider expectations to none since we are not specifying a host predicate
     reset(hostResourceProvider);
     replay(hostResourceProvider);
-
     Map<String, Object> properties = createBlueprintRequestPropertiesCountOnly(CLUSTER_NAME, BLUEPRINT_NAME);
-    ProvisionClusterRequest provisionClusterRequest = new ProvisionClusterRequest(properties);
+
+    ProvisionClusterRequest provisionClusterRequest = new ProvisionClusterRequest(properties, null);
 
     assertEquals(CLUSTER_NAME, provisionClusterRequest.getClusterName());
     assertEquals(TopologyRequest.Type.PROVISION, provisionClusterRequest.getType());
@@ -203,7 +205,7 @@ public class ProvisionClusterRequestTest {
   @Test
   public void testMultipleGroups() throws Exception {
     Map<String, Object> properties = createBlueprintRequestProperties(CLUSTER_NAME, BLUEPRINT_NAME);
-    ProvisionClusterRequest provisionClusterRequest = new ProvisionClusterRequest(properties);
+    ProvisionClusterRequest provisionClusterRequest = new ProvisionClusterRequest(properties, null);
 
     assertEquals(CLUSTER_NAME, provisionClusterRequest.getClusterName());
     assertEquals(TopologyRequest.Type.PROVISION, provisionClusterRequest.getType());
@@ -279,7 +281,43 @@ public class ProvisionClusterRequestTest {
     reset(hostResourceProvider);
     replay(hostResourceProvider);
     // should result in an exception
-    new ProvisionClusterRequest(properties);
+    new ProvisionClusterRequest(properties, null);
+  }
+
+  @Test
+  public void test_Credentials() throws Exception {
+    Map<String, Object> properties = createBlueprintRequestProperties(CLUSTER_NAME, BLUEPRINT_NAME);
+    HashMap<String, String> credentialHashMap = new HashMap<>();
+    credentialHashMap.put("alias", "testAlias");
+    credentialHashMap.put("principal", "testPrincipal");
+    credentialHashMap.put("key", "testKey");
+    credentialHashMap.put("type", "temporary");
+    Set<Map<String, String>> credentialsSet = new HashSet<>();
+    credentialsSet.add(credentialHashMap);
+    properties.put("credentials", credentialsSet);
+
+    ProvisionClusterRequest provisionClusterRequest = new ProvisionClusterRequest(properties, null);
+
+    assertEquals(provisionClusterRequest.getCredentialsMap().get("testAlias").getAlias(), "testAlias");
+    assertEquals(provisionClusterRequest.getCredentialsMap().get("testAlias").getPrincipal(), "testPrincipal");
+    assertEquals(provisionClusterRequest.getCredentialsMap().get("testAlias").getKey(), "testKey");
+    assertEquals(provisionClusterRequest.getCredentialsMap().get("testAlias").getType().name(), "TEMPORARY");
+  }
+
+
+  @Test(expected=InvalidTopologyTemplateException.class)
+  public void test_CredentialsInvalidType() throws Exception {
+    Map<String, Object> properties = createBlueprintRequestProperties(CLUSTER_NAME, BLUEPRINT_NAME);
+    HashMap<String, String> credentialHashMap = new HashMap<>();
+    credentialHashMap.put("alias", "testAlias");
+    credentialHashMap.put("principal", "testPrincipal");
+    credentialHashMap.put("key", "testKey");
+    credentialHashMap.put("type", "testType");
+    Set<Map<String, String>> credentialsSet = new HashSet<>();
+    credentialsSet.add(credentialHashMap);
+    properties.put("credentials", credentialsSet);
+
+    new ProvisionClusterRequest(properties, null);
   }
 
   @Test(expected= InvalidTopologyTemplateException.class)
@@ -291,7 +329,7 @@ public class ProvisionClusterRequestTest {
     reset(hostResourceProvider);
     replay(hostResourceProvider);
     // should result in an exception
-    new ProvisionClusterRequest(properties);
+    new ProvisionClusterRequest(properties, null);
   }
 
   @Test(expected= InvalidTopologyTemplateException.class)
@@ -303,7 +341,7 @@ public class ProvisionClusterRequestTest {
     reset(hostResourceProvider);
     replay(hostResourceProvider);
     // should result in an exception
-    new ProvisionClusterRequest(properties);
+    new ProvisionClusterRequest(properties, null);
   }
 
   @Test(expected = InvalidTopologyTemplateException.class)
@@ -323,14 +361,14 @@ public class ProvisionClusterRequestTest {
     reset(hostResourceProvider);
     replay(hostResourceProvider);
     // should result in an exception
-    new ProvisionClusterRequest(properties);
+    new ProvisionClusterRequest(properties, null);
   }
 
   @Test
   public void testGetValidators_noDefaultPassword() throws Exception {
     Map<String, Object> properties = createBlueprintRequestProperties(CLUSTER_NAME, BLUEPRINT_NAME);
     //properties.put("default_password", "pwd");
-    TopologyRequest request = new ProvisionClusterRequest(properties);
+    TopologyRequest request = new ProvisionClusterRequest(properties, null);
     List<TopologyValidator> validators = request.getTopologyValidators();
 
     assertEquals(1, validators.size());
@@ -344,7 +382,7 @@ public class ProvisionClusterRequestTest {
   public void testGetValidators_defaultPassword() throws Exception {
     Map<String, Object> properties = createBlueprintRequestProperties(CLUSTER_NAME, BLUEPRINT_NAME);
     properties.put("default_password", "pwd");
-    TopologyRequest request = new ProvisionClusterRequest(properties);
+    TopologyRequest request = new ProvisionClusterRequest(properties, null);
     List<TopologyValidator> validators = request.getTopologyValidators();
 
     assertEquals(1, validators.size());
@@ -359,11 +397,11 @@ public class ProvisionClusterRequestTest {
     reset(hostResourceProvider);
     // checkPropertyIds() returns invalid property names
     expect(hostResourceProvider.checkPropertyIds(Collections.singleton("Hosts/host_name"))).
-        andReturn(Collections.singleton("Hosts/host_name"));
+      andReturn(Collections.singleton("Hosts/host_name"));
     replay(hostResourceProvider);
 
     // should result in an exception due to invalid property in host predicate
-    new ProvisionClusterRequest(createBlueprintRequestProperties(CLUSTER_NAME, BLUEPRINT_NAME));
+    new ProvisionClusterRequest(createBlueprintRequestProperties(CLUSTER_NAME, BLUEPRINT_NAME), null);
   }
 
   @Test(expected = InvalidTopologyTemplateException.class)
@@ -375,7 +413,7 @@ public class ProvisionClusterRequestTest {
     Map<String, Object> properties = createBlueprintRequestPropertiesNameOnly(CLUSTER_NAME, BLUEPRINT_NAME);
     ((Map) ((List) properties.get("host_groups")).iterator().next()).put("host_count", "5");
     // should result in an exception due to both host name and host count being specified
-    new ProvisionClusterRequest(properties);
+    new ProvisionClusterRequest(properties, null);
   }
 
   @Test(expected = InvalidTopologyTemplateException.class)
@@ -387,7 +425,7 @@ public class ProvisionClusterRequestTest {
     Map<String, Object> properties = createBlueprintRequestPropertiesNameOnly(CLUSTER_NAME, BLUEPRINT_NAME);
     ((Map) ((List) properties.get("host_groups")).iterator().next()).put("host_predicate", "Hosts/host_name=myTestHost");
     // should result in an exception due to both host name and host count being specified
-    new ProvisionClusterRequest(properties);
+    new ProvisionClusterRequest(properties, null);
   }
 
   public static Map<String, Object> createBlueprintRequestProperties(String clusterName, String blueprintName) {

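The two credential tests above pass the "credentials" property as a set of maps; in a provisioning request body this corresponds to a block along the lines of the following sketch (values are the test fixtures). The "type" value has to resolve to a known credential type: "temporary" is read back as TEMPORARY, while an unrecognized value such as "testType" makes the ProvisionClusterRequest constructor throw InvalidTopologyTemplateException.

    "credentials" : [
      {
        "alias" : "testAlias",
        "principal" : "testPrincipal",
        "key" : "testKey",
        "type" : "temporary"
      }
    ]
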
http://git-wip-us.apache.org/repos/asf/ambari/blob/2337e539/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/WidgetResourceProviderTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/WidgetResourceProviderTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/WidgetResourceProviderTest.java
index dff0a62..4c8892e 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/WidgetResourceProviderTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/WidgetResourceProviderTest.java
@@ -22,16 +22,17 @@ import com.google.inject.Guice;
 import com.google.inject.Injector;
 import com.google.inject.Module;
 import com.google.inject.util.Modules;
-import org.apache.ambari.server.controller.spi.RequestStatus;
 import org.apache.ambari.server.controller.AmbariManagementController;
 import org.apache.ambari.server.controller.spi.Predicate;
 import org.apache.ambari.server.controller.spi.Request;
+import org.apache.ambari.server.controller.spi.RequestStatus;
 import org.apache.ambari.server.controller.spi.Resource;
 import org.apache.ambari.server.controller.utilities.PredicateBuilder;
 import org.apache.ambari.server.controller.utilities.PropertyHelper;
 import org.apache.ambari.server.orm.InMemoryDefaultTestModule;
 import org.apache.ambari.server.orm.dao.WidgetDAO;
 import org.apache.ambari.server.orm.entities.WidgetEntity;
+import org.apache.ambari.server.security.encryption.CredentialStoreService;
 import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.Clusters;
 import org.easymock.Capture;
@@ -510,6 +511,9 @@ public class WidgetResourceProviderTest {
           EasyMock.createNiceMock(Clusters.class));
       binder.bind(Cluster.class).toInstance(
               EasyMock.createNiceMock(Cluster.class));
+      binder.bind(CredentialStoreService.class).toInstance(
+        EasyMock.createNiceMock(CredentialStoreService.class)
+      );
     }
   }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/2337e539/ambari-server/src/test/java/org/apache/ambari/server/state/UpgradeHelperTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/UpgradeHelperTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/UpgradeHelperTest.java
index 0c38802..626024a 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/UpgradeHelperTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/UpgradeHelperTest.java
@@ -17,26 +17,14 @@
  */
 package org.apache.ambari.server.state;
 
-import static org.easymock.EasyMock.expect;
-import static org.easymock.EasyMock.replay;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertTrue;
-
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
-
+import com.google.gson.Gson;
 import com.google.gson.reflect.TypeToken;
+import com.google.inject.Binder;
+import com.google.inject.Guice;
+import com.google.inject.Injector;
+import com.google.inject.Module;
+import com.google.inject.persist.PersistService;
+import com.google.inject.util.Modules;
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.api.services.AmbariMetaInfo;
 import org.apache.ambari.server.controller.AmbariManagementController;
@@ -45,30 +33,43 @@ import org.apache.ambari.server.controller.ConfigurationRequest;
 import org.apache.ambari.server.orm.GuiceJpaInitializer;
 import org.apache.ambari.server.orm.InMemoryDefaultTestModule;
 import org.apache.ambari.server.orm.OrmTestHelper;
-import org.apache.ambari.server.orm.dao.RepositoryVersionDAO;
-import org.apache.ambari.server.orm.dao.StackDAO;
-import org.apache.ambari.server.orm.entities.StackEntity;
 import org.apache.ambari.server.stack.HostsType;
 import org.apache.ambari.server.stack.MasterHostResolver;
 import org.apache.ambari.server.state.UpgradeHelper.UpgradeGroupHolder;
 import org.apache.ambari.server.state.stack.ConfigUpgradePack;
 import org.apache.ambari.server.state.stack.UpgradePack;
-import org.apache.ambari.server.state.stack.upgrade.*;
+import org.apache.ambari.server.state.stack.upgrade.ConfigUpgradeChangeDefinition;
+import org.apache.ambari.server.state.stack.upgrade.ConfigureTask;
+import org.apache.ambari.server.state.stack.upgrade.Direction;
+import org.apache.ambari.server.state.stack.upgrade.ManualTask;
+import org.apache.ambari.server.state.stack.upgrade.StageWrapper;
+import org.apache.ambari.server.state.stack.upgrade.Task;
+import org.apache.ambari.server.state.stack.upgrade.TaskWrapper;
+import org.apache.ambari.server.state.stack.upgrade.UpgradeType;
 import org.easymock.EasyMock;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Ignore;
 import org.junit.Test;
 
-import com.google.gson.Gson;
-import com.google.gson.reflect.TypeToken;
-import com.google.inject.Binder;
-import com.google.inject.Guice;
-import com.google.inject.Injector;
-import com.google.inject.Module;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
 
-import com.google.inject.persist.PersistService;
-import com.google.inject.util.Modules;
+import static org.easymock.EasyMock.expect;
+import static org.easymock.EasyMock.replay;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
 
 /**
  * Tests the {@link UpgradeHelper} class

http://git-wip-us.apache.org/repos/asf/ambari/blob/2337e539/ambari-server/src/test/java/org/apache/ambari/server/state/cluster/ClustersTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/cluster/ClustersTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/cluster/ClustersTest.java
index a1e2b48..2d9d779 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/cluster/ClustersTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/cluster/ClustersTest.java
@@ -18,18 +18,11 @@
 
 package org.apache.ambari.server.state.cluster;
 
-import static org.junit.Assert.fail;
-
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
-import javax.persistence.EntityManager;
-
+import com.google.inject.Guice;
+import com.google.inject.Inject;
+import com.google.inject.Injector;
+import com.google.inject.persist.PersistService;
+import junit.framework.Assert;
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.ClusterNotFoundException;
 import org.apache.ambari.server.DuplicateResourceException;
@@ -52,6 +45,7 @@ import org.apache.ambari.server.state.Config;
 import org.apache.ambari.server.state.ConfigFactory;
 import org.apache.ambari.server.state.Host;
 import org.apache.ambari.server.state.RepositoryVersionState;
+import org.apache.ambari.server.state.SecurityType;
 import org.apache.ambari.server.state.Service;
 import org.apache.ambari.server.state.ServiceComponent;
 import org.apache.ambari.server.state.ServiceComponentHost;
@@ -61,12 +55,16 @@ import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
 
-import com.google.inject.Guice;
-import com.google.inject.Inject;
-import com.google.inject.Injector;
-import com.google.inject.persist.PersistService;
+import javax.persistence.EntityManager;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
 
-import junit.framework.Assert;
+import static org.junit.Assert.fail;
 
 public class ClustersTest {
 
@@ -165,6 +163,19 @@ public class ClustersTest {
 
   }
 
+  @Test
+  public void testAddAndGetClusterWithSecurityType() throws AmbariException {
+    StackId stackId = new StackId("HDP-2.1.1");
+
+    String c1 = "foo";
+    SecurityType securityType = SecurityType.KERBEROS;
+    clusters.addCluster(c1, stackId, securityType);
+
+    Assert.assertNotNull(clusters.getCluster(c1));
+
+    Assert.assertEquals(c1, clusters.getCluster(c1).getClusterName());
+    Assert.assertEquals(securityType, clusters.getCluster(c1).getSecurityType());
+  }
 
   @Test
   public void testAddAndGetHost() throws AmbariException {

http://git-wip-us.apache.org/repos/asf/ambari/blob/2337e539/ambari-server/src/test/java/org/apache/ambari/server/topology/AmbariContextTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/topology/AmbariContextTest.java b/ambari-server/src/test/java/org/apache/ambari/server/topology/AmbariContextTest.java
index 1038b60..254d3a3 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/topology/AmbariContextTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/topology/AmbariContextTest.java
@@ -18,16 +18,6 @@
 
 package org.apache.ambari.server.topology;
 
-import java.lang.reflect.Field;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Map;
-import java.util.Set;
-
 import org.apache.ambari.server.controller.AmbariManagementController;
 import org.apache.ambari.server.controller.ClusterRequest;
 import org.apache.ambari.server.controller.ConfigGroupRequest;
@@ -54,11 +44,19 @@ import org.apache.ambari.server.state.Service;
 import org.apache.ambari.server.state.StackId;
 import org.apache.ambari.server.state.configgroup.ConfigGroup;
 import org.easymock.Capture;
-import org.easymock.EasyMockSupport;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
 
+import java.lang.reflect.Field;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
+
 import static org.easymock.EasyMock.capture;
 import static org.easymock.EasyMock.createMock;
 import static org.easymock.EasyMock.createNiceMock;
@@ -257,7 +255,7 @@ public class AmbariContextTest {
     replayAll();
 
     // test
-    context.createAmbariResources(topology, CLUSTER_NAME);
+    context.createAmbariResources(topology, CLUSTER_NAME, null);
 
     // assertions
     ClusterRequest clusterRequest = clusterRequestCapture.getValue();

http://git-wip-us.apache.org/repos/asf/ambari/blob/2337e539/ambari-server/src/test/java/org/apache/ambari/server/topology/BlueprintFactoryTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/topology/BlueprintFactoryTest.java b/ambari-server/src/test/java/org/apache/ambari/server/topology/BlueprintFactoryTest.java
index 31b8f5c..3a3b6dc 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/topology/BlueprintFactoryTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/topology/BlueprintFactoryTest.java
@@ -120,7 +120,7 @@ public class BlueprintFactoryTest {
     Map<String, Object> props = BlueprintResourceProviderTest.getBlueprintTestProperties().iterator().next();
 
     replay(stack, dao, entity, configEntity);
-    Blueprint blueprint = testFactory.createBlueprint(props);
+    Blueprint blueprint = testFactory.createBlueprint(props, null);
 
     assertEquals(BLUEPRINT_NAME, blueprint.getName());
     assertSame(stack, blueprint.getStack());
@@ -187,7 +187,7 @@ public class BlueprintFactoryTest {
     props.remove(BlueprintResourceProvider.BLUEPRINT_NAME_PROPERTY_ID);
 
     replay(stack, dao, entity, configEntity);
-    testFactory.createBlueprint(props);
+    testFactory.createBlueprint(props, null);
   }
 
   @Test(expected=IllegalArgumentException.class)
@@ -197,7 +197,7 @@ public class BlueprintFactoryTest {
     ((Set<Map<String, Object>>) props.get(BlueprintResourceProvider.HOST_GROUP_PROPERTY_ID)).clear();
 
     replay(stack, dao, entity, configEntity);
-    testFactory.createBlueprint(props);
+    testFactory.createBlueprint(props, null);
   }
 
   @Test(expected=IllegalArgumentException.class)
@@ -207,7 +207,7 @@ public class BlueprintFactoryTest {
     ((Set<Map<String, Object>>) props.get(BlueprintResourceProvider.HOST_GROUP_PROPERTY_ID)).iterator().next().remove("name");
 
     replay(stack, dao, entity, configEntity);
-    testFactory.createBlueprint(props);
+    testFactory.createBlueprint(props, null);
   }
 
   @Test(expected=IllegalArgumentException.class)
@@ -218,7 +218,7 @@ public class BlueprintFactoryTest {
         iterator().next().remove(BlueprintResourceProvider.COMPONENT_PROPERTY_ID);
 
     replay(stack, dao, entity, configEntity);
-    testFactory.createBlueprint(props);
+    testFactory.createBlueprint(props, null);
   }
 
   @Test(expected=IllegalArgumentException.class)
@@ -229,7 +229,7 @@ public class BlueprintFactoryTest {
         iterator().next().get(BlueprintResourceProvider.COMPONENT_PROPERTY_ID)).iterator().next().put("name", "INVALID_COMPONENT");
 
     replay(stack, dao, entity, configEntity);
-    testFactory.createBlueprint(props);
+    testFactory.createBlueprint(props, null);
   }
 
   private class TestBlueprintFactory extends BlueprintFactory {

http://git-wip-us.apache.org/repos/asf/ambari/blob/2337e539/ambari-server/src/test/java/org/apache/ambari/server/topology/BlueprintImplTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/topology/BlueprintImplTest.java b/ambari-server/src/test/java/org/apache/ambari/server/topology/BlueprintImplTest.java
index 9d4163a..2199247 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/topology/BlueprintImplTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/topology/BlueprintImplTest.java
@@ -19,6 +19,8 @@
 package org.apache.ambari.server.topology;
 
 import org.apache.ambari.server.controller.internal.Stack;
+import org.apache.ambari.server.orm.entities.BlueprintEntity;
+import org.apache.ambari.server.state.SecurityType;
 import org.junit.Test;
 
 import java.util.Collection;
@@ -31,6 +33,7 @@ import static org.easymock.EasyMock.createNiceMock;
 import static org.easymock.EasyMock.expect;
 import static org.easymock.EasyMock.replay;
 import static org.easymock.EasyMock.verify;
+import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
 
 /**
@@ -119,10 +122,14 @@ public class BlueprintImplTest {
     // for this basic test not ensuring that stack properties are ignored, this is tested in another test
     Configuration configuration = new Configuration(properties, attributes, EMPTY_CONFIGURATION);
 
-    Blueprint blueprint = new BlueprintImpl("test", hostGroups, stack, configuration);
+    SecurityConfiguration securityConfiguration = new SecurityConfiguration(SecurityType.KERBEROS, "testRef", null);
+    Blueprint blueprint = new BlueprintImpl("test", hostGroups, stack, configuration, securityConfiguration);
     blueprint.validateRequiredProperties();
+    BlueprintEntity entity = blueprint.toEntity();
 
     verify(stack, group1, group2);
+    assertTrue(entity.getSecurityType() == SecurityType.KERBEROS);
+    assertTrue(entity.getSecurityDescriptorReference().equals("testRef"));
   }
 
   @Test
@@ -192,7 +199,7 @@ public class BlueprintImplTest {
     // for this basic test not ensuring that stack properties are ignored, this is tested in another test
     Configuration configuration = new Configuration(properties, attributes, EMPTY_CONFIGURATION);
 
-    Blueprint blueprint = new BlueprintImpl("test", hostGroups, stack, configuration);
+    Blueprint blueprint = new BlueprintImpl("test", hostGroups, stack, configuration, null);
     try {
       blueprint.validateRequiredProperties();
       fail("Expected exception to be thrown for missing config property");
@@ -276,10 +283,13 @@ public class BlueprintImplTest {
 
     replay(stack, group1, group2);
 
-    Blueprint blueprint = new BlueprintImpl("test", hostGroups, stack, configuration);
+    Blueprint blueprint = new BlueprintImpl("test", hostGroups, stack, configuration, null);
     blueprint.validateRequiredProperties();
+    BlueprintEntity entity = blueprint.toEntity();
 
     verify(stack, group1, group2);
+    assertTrue(entity.getSecurityType() == SecurityType.NONE);
+    assertTrue(entity.getSecurityDescriptorReference() == null);
   }
 
   //todo: ensure coverage for these existing tests