You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@ambari.apache.org by rn...@apache.org on 2015/10/29 16:19:23 UTC

[1/3] ambari git commit: AMBARI-13431. Blueprints Configuration to select Kerberos. (Sandor Magyari and Laszlo Puskas via rnettleton)

Repository: ambari
Updated Branches:
  refs/heads/branch-2.1 614893c57 -> 6e67b5e54


http://git-wip-us.apache.org/repos/asf/ambari/blob/6e67b5e5/ambari-server/src/test/java/org/apache/ambari/server/topology/SecurityConfigurationFactoryTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/topology/SecurityConfigurationFactoryTest.java b/ambari-server/src/test/java/org/apache/ambari/server/topology/SecurityConfigurationFactoryTest.java
new file mode 100644
index 0000000..bf9556f
--- /dev/null
+++ b/ambari-server/src/test/java/org/apache/ambari/server/topology/SecurityConfigurationFactoryTest.java
@@ -0,0 +1,163 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.topology;
+
+import com.google.gson.Gson;
+import org.apache.ambari.server.orm.dao.KerberosDescriptorDAO;
+import org.apache.ambari.server.orm.entities.KerberosDescriptorEntity;
+import org.apache.ambari.server.state.SecurityType;
+import org.easymock.Capture;
+import org.easymock.EasyMock;
+import org.easymock.EasyMockRule;
+import org.easymock.EasyMockSupport;
+import org.easymock.Mock;
+import org.easymock.MockType;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import static org.easymock.EasyMock.capture;
+import static org.junit.Assert.assertTrue;
+
+public class SecurityConfigurationFactoryTest {
+  private static final String TEST_KERBEROS_DESCRIPTOR_REFERENCE = "test-kd-reference";
+  private static final String TEST_KERBEROS_DESCRIPTOR_JSON = "{\"test\":\"test json\"}";
+
+  @Rule
+  public EasyMockRule mocks = new EasyMockRule(this);
+
+  @Mock(type = MockType.STRICT)
+  private KerberosDescriptorDAO kerberosDescriptorDAO;
+
+  private SecurityConfigurationFactory testSubject;
+
+  @Before
+  public void before() {
+    testSubject = new SecurityConfigurationFactory(new Gson(), kerberosDescriptorDAO, new KerberosDescriptorFactory());
+    EasyMockSupport.injectMocks(testSubject);
+  }
+
+
+  @Test
+  public void testShouldLoadKerberosDescriptorWhenKDReferenceFoundInRequest() throws Exception {
+
+    EasyMock.expect(kerberosDescriptorDAO.findByName(TEST_KERBEROS_DESCRIPTOR_REFERENCE)).andReturn(testKDEntity());
+
+    Map<String, Object> reuqestMap = new HashMap<>();
+    Map<String, Object> security = new HashMap<>();
+    security.put(SecurityConfigurationFactory.TYPE_PROPERTY_ID, SecurityType.KERBEROS.toString());
+    security.put(SecurityConfigurationFactory.KERBEROS_DESCRIPTOR_REFERENCE_PROPERTY_ID, TEST_KERBEROS_DESCRIPTOR_REFERENCE);
+    reuqestMap.put(SecurityConfigurationFactory.SECURITY_PROPERTY_ID, security);
+
+    EasyMock.replay(kerberosDescriptorDAO);
+    SecurityConfiguration securityConfiguration = testSubject.createSecurityConfigurationFromRequest(reuqestMap, false);
+
+    EasyMock.verify(kerberosDescriptorDAO);
+    assertTrue(securityConfiguration.getType() == SecurityType.KERBEROS);
+  }
+
+  @Test
+  public void testShouldPersistKDWhenKDFoundInRequest() throws Exception {
+    // GIVEN
+    Capture<KerberosDescriptorEntity> kdEntityCaptor = EasyMock.newCapture();
+    kerberosDescriptorDAO.create(capture(kdEntityCaptor));
+    EasyMock.replay(kerberosDescriptorDAO);
+
+    Map<String, Object> reuqestMap = new HashMap<>();
+    Map<String, Object> security = new HashMap<>();
+    security.put(SecurityConfigurationFactory.TYPE_PROPERTY_ID, SecurityType.KERBEROS.toString());
+    security.put(SecurityConfigurationFactory.KERBEROS_DESCRIPTOR_PROPERTY_ID, testKDReqPropertyMap());
+    reuqestMap.put(SecurityConfigurationFactory.SECURITY_PROPERTY_ID, security);
+
+    // WHEN
+    testSubject.createSecurityConfigurationFromRequest(reuqestMap, true);
+
+
+    // THEN
+    EasyMock.verify(kerberosDescriptorDAO);
+    Assert.assertEquals("The persisted descriptortext is not as expected",
+        "{\"test\":\"{\\\"test\\\":\\\"test json\\\"}\"}",
+        kdEntityCaptor.getValue().getKerberosDescriptorText());
+    Assert.assertNotNull("There is no generated kerberos descriptor reference in the persisting entity!",
+        kdEntityCaptor.getValue().getName());
+  }
+
+  @Test(expected = IllegalArgumentException.class)
+  public void testCreateKerberosSecurityWithoutDescriptor() throws Exception {
+    Map<String, Object> reuqestMap = new HashMap<>();
+    Map<String, Object> security = new HashMap<>();
+    security.put(SecurityConfigurationFactory.TYPE_PROPERTY_ID, SecurityType.KERBEROS.toString());
+    reuqestMap.put(SecurityConfigurationFactory.SECURITY_PROPERTY_ID, security);
+
+    SecurityConfiguration securityConfiguration = testSubject.createSecurityConfigurationFromRequest(reuqestMap, false);
+
+    assertTrue(securityConfiguration.getType() == SecurityType.KERBEROS);
+  }
+
+  @Test
+  public void testCreateEmpty() throws Exception {
+    Map<String, Object> reuqestMap = new HashMap<>();
+
+    SecurityConfiguration securityConfiguration = testSubject.createSecurityConfigurationFromRequest(reuqestMap, false);
+
+    assertTrue(securityConfiguration == null);
+  }
+
+  @Test(expected = IllegalArgumentException.class)
+  public void testCreateInvalidSecurityType() throws Exception {
+    Map<String, Object> reuqestMap = new HashMap<>();
+    Map<String, Object> security = new HashMap<>();
+    security.put(SecurityConfigurationFactory.TYPE_PROPERTY_ID, "INVALID_SECURITY_TYPE");
+    reuqestMap.put(SecurityConfigurationFactory.SECURITY_PROPERTY_ID, security);
+
+    SecurityConfiguration securityConfiguration = testSubject.createSecurityConfigurationFromRequest(reuqestMap, false);
+
+    assertTrue(securityConfiguration.getType() == SecurityType.KERBEROS);
+  }
+
+  @Test
+  public void testCreateKerberosSecurityTypeNone() throws Exception {
+    Map<String, Object> reuqestMap = new HashMap<>();
+    Map<String, Object> security = new HashMap<>();
+    security.put(SecurityConfigurationFactory.TYPE_PROPERTY_ID, SecurityType.NONE.toString());
+    reuqestMap.put(SecurityConfigurationFactory.SECURITY_PROPERTY_ID, security);
+
+    SecurityConfiguration securityConfiguration = testSubject.createSecurityConfigurationFromRequest(reuqestMap, false);
+
+    assertTrue(securityConfiguration.getType() == SecurityType.NONE);
+  }
+
+  private KerberosDescriptorEntity testKDEntity() {
+    KerberosDescriptorEntity testDescriptorEntity = new KerberosDescriptorEntity();
+    testDescriptorEntity.setName(TEST_KERBEROS_DESCRIPTOR_REFERENCE);
+    testDescriptorEntity.setKerberosDescriptorText(TEST_KERBEROS_DESCRIPTOR_JSON);
+    return testDescriptorEntity;
+  }
+
+  private Map<String, Object> testKDReqPropertyMap() {
+    Map<String, Object> kdMap = new HashMap<>();
+    kdMap.put("test", TEST_KERBEROS_DESCRIPTOR_JSON);
+    return kdMap;
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/6e67b5e5/ambari-server/src/test/java/org/apache/ambari/server/topology/TopologyManagerTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/topology/TopologyManagerTest.java b/ambari-server/src/test/java/org/apache/ambari/server/topology/TopologyManagerTest.java
index 8eeb54c..bd4f13d 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/topology/TopologyManagerTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/topology/TopologyManagerTest.java
@@ -7,7 +7,7 @@
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
  *
- *     http://www.apache.org/licenses/LICENSE-2.0
+ *    http://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
@@ -23,9 +23,18 @@ import org.apache.ambari.server.controller.ConfigurationRequest;
 import org.apache.ambari.server.controller.RequestStatusResponse;
 import org.apache.ambari.server.controller.internal.ProvisionClusterRequest;
 import org.apache.ambari.server.controller.internal.Stack;
+import org.apache.ambari.server.state.SecurityType;
 import org.easymock.Capture;
+import org.easymock.EasyMock;
+import org.easymock.EasyMockRule;
+import org.easymock.EasyMockSupport;
+import org.easymock.Mock;
+import org.easymock.MockType;
+import org.easymock.TestSubject;
 import org.junit.After;
 import org.junit.Before;
+import org.junit.Ignore;
+import org.junit.Rule;
 import org.junit.Test;
 
 import java.lang.reflect.Field;
@@ -46,6 +55,7 @@ import static org.easymock.EasyMock.eq;
 import static org.easymock.EasyMock.expect;
 import static org.easymock.EasyMock.expectLastCall;
 import static org.easymock.EasyMock.isA;
+import static org.easymock.EasyMock.isNull;
 import static org.easymock.EasyMock.replay;
 import static org.easymock.EasyMock.reset;
 import static org.easymock.EasyMock.same;
@@ -54,6 +64,7 @@ import static org.easymock.EasyMock.verify;
 /**
  * TopologyManager unit tests
  */
+@Ignore("The setup needs to be rethought as it's hard to follow!")
 public class TopologyManagerTest {
 
   private static final String CLUSTER_NAME = "test-cluster";
@@ -62,7 +73,8 @@ public class TopologyManagerTest {
   private static final String STACK_NAME = "test-stack";
   private static final String STACK_VERSION = "test-stack-version";
 
-  private TopologyManager topologyManager;
+  @TestSubject
+  private TopologyManager topologyManager = new TopologyManager();
 
   private final Blueprint blueprint = createNiceMock(Blueprint.class);
   private final Stack stack = createNiceMock(Stack.class);
@@ -122,6 +134,13 @@ public class TopologyManagerTest {
   private Capture<Runnable> updateConfigTaskCapture;
 
 
+  @Rule
+  public EasyMockRule mocks = new EasyMockRule(this);
+
+  @Mock(type = MockType.STRICT)
+  private SecurityConfigurationFactory securityConfigurationFactory;
+
+
   @Before
   public void setup() throws Exception {
     clusterTopologyCapture = new Capture<ClusterTopology>();
@@ -234,7 +253,7 @@ public class TopologyManagerTest {
 
     expect(ambariContext.getPersistedTopologyState()).andReturn(persistedState).anyTimes();
     //todo: don't ignore param
-    ambariContext.createAmbariResources(isA(ClusterTopology.class), eq(CLUSTER_NAME));
+    ambariContext.createAmbariResources(isA(ClusterTopology.class), eq(CLUSTER_NAME), (SecurityType) isNull());
     expectLastCall().once();
     expect(ambariContext.getNextRequestId()).andReturn(1L).once();
     expect(ambariContext.isClusterKerberosEnabled(CLUSTER_ID)).andReturn(false).anyTimes();
@@ -275,11 +294,13 @@ public class TopologyManagerTest {
     f.setAccessible(true);
     f.set(null, ambariContext);
 
-    topologyManager = new TopologyManager();
 
     f = clazz.getDeclaredField("executor");
     f.setAccessible(true);
     f.set(topologyManager, executor);
+
+    EasyMockSupport.injectMocks(topologyManager);
+    EasyMock.replay(securityConfigurationFactory);
   }
 
   @After

http://git-wip-us.apache.org/repos/asf/ambari/blob/6e67b5e5/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog213Test.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog213Test.java b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog213Test.java
index 0d932cb..b5c845d 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog213Test.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog213Test.java
@@ -819,6 +819,11 @@ public class UpgradeCatalog213Test {
     Capture<DBAccessor.DBColumnInfo> capturedColumn = EasyMock.newCapture();
     Capture<DBAccessor.DBColumnInfo> capturedHostRoleCommandColumn = EasyMock.newCapture();
 
+    Capture<String> capturedBlueprintTableName = EasyMock.newCapture();
+    Capture<DBAccessor.DBColumnInfo> capturedNewBlueprintColumn1 = EasyMock.newCapture();
+    Capture<DBAccessor.DBColumnInfo> capturedNewBlueprintColumn2 = EasyMock.newCapture();
+
+
     EasyMock.expect(mockedInjector.getInstance(DaoUtils.class)).andReturn(mockedDaoUtils);
     mockedInjector.injectMembers(anyObject(UpgradeCatalog.class));
     EasyMock.expect(mockedConfiguration.getDatabaseType()).andReturn(Configuration.DatabaseType.POSTGRES).anyTimes();
@@ -838,6 +843,9 @@ public class UpgradeCatalog213Test {
     mockedDbAccessor.createTable(capture(capturedTableName), capture(capturedColumns), capture(capturedPKColumn));
     mockedDbAccessor.alterColumn(eq("host_role_command"), capture(capturedHostRoleCommandColumn));
 
+    mockedDbAccessor.addColumn(capture(capturedBlueprintTableName), capture(capturedNewBlueprintColumn1));
+    mockedDbAccessor.addColumn(capture(capturedBlueprintTableName), capture(capturedNewBlueprintColumn2));
+
     mocksControl.replay();
 
     UpgradeCatalog213 testSubject = new UpgradeCatalog213(mockedInjector);
@@ -856,5 +864,12 @@ public class UpgradeCatalog213Test {
     Assert.assertEquals("The primary key is wrong!", "kerberos_descriptor_name", capturedPKColumn.getValue());
     Assert.assertTrue("Ther number of columns is wrong!", capturedColumns.getValue().size() == 2);
 
+    Assert.assertEquals("The table name is wrong!", "blueprint", capturedBlueprintTableName.getValue());
+
+    Assert.assertEquals("The column name is wrong!", "security_type", capturedNewBlueprintColumn1.getValue().getName());
+    Assert.assertEquals("The column name is wrong!", "security_descriptor_reference", capturedNewBlueprintColumn2
+      .getValue().getName());
+
+
   }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/6e67b5e5/ambari-server/src/test/java/org/apache/ambari/server/utils/StageUtilsTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/utils/StageUtilsTest.java b/ambari-server/src/test/java/org/apache/ambari/server/utils/StageUtilsTest.java
index e85d9a1..3262364 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/utils/StageUtilsTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/utils/StageUtilsTest.java
@@ -17,34 +17,13 @@
  */
 package org.apache.ambari.server.utils;
 
-import static org.easymock.EasyMock.anyObject;
-import static org.easymock.EasyMock.expect;
-import static org.easymock.EasyMock.expectLastCall;
-import static org.easymock.EasyMock.getCurrentArguments;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-
-import java.io.ByteArrayInputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.InputStreamReader;
-import java.nio.charset.Charset;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-import java.util.Set;
-import java.util.SortedMap;
-import java.util.TreeMap;
-
-import javax.persistence.EntityManager;
-import javax.xml.bind.JAXBException;
-
+import com.google.common.collect.ContiguousSet;
+import com.google.common.collect.DiscreteDomain;
+import com.google.common.collect.Range;
+import com.google.gson.Gson;
 import com.google.inject.AbstractModule;
+import com.google.inject.Guice;
+import com.google.inject.Injector;
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.actionmanager.ExecutionCommandWrapper;
 import org.apache.ambari.server.actionmanager.Stage;
@@ -52,6 +31,7 @@ import org.apache.ambari.server.agent.ExecutionCommand;
 import org.apache.ambari.server.api.services.AmbariMetaInfo;
 import org.apache.ambari.server.orm.DBAccessor;
 import org.apache.ambari.server.security.SecurityHelper;
+import org.apache.ambari.server.security.encryption.CredentialStoreService;
 import org.apache.ambari.server.stack.StackManagerFactory;
 import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.Clusters;
@@ -75,12 +55,31 @@ import org.junit.Before;
 import org.junit.Ignore;
 import org.junit.Test;
 
-import com.google.common.collect.ContiguousSet;
-import com.google.common.collect.DiscreteDomain;
-import com.google.common.collect.Range;
-import com.google.gson.Gson;
-import com.google.inject.Guice;
-import com.google.inject.Injector;
+import javax.persistence.EntityManager;
+import javax.xml.bind.JAXBException;
+import java.io.ByteArrayInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.nio.charset.Charset;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Set;
+import java.util.SortedMap;
+import java.util.TreeMap;
+
+import static org.easymock.EasyMock.anyObject;
+import static org.easymock.EasyMock.expect;
+import static org.easymock.EasyMock.expectLastCall;
+import static org.easymock.EasyMock.getCurrentArguments;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
 
 public class StageUtilsTest extends EasyMockSupport {
   private static final String STACK_ID = "HDP-1.3.1";
@@ -100,6 +99,7 @@ public class StageUtilsTest extends EasyMockSupport {
         bind(HostFactory.class).toInstance(createNiceMock(HostFactory.class));
         bind(SecurityHelper.class).toInstance(createNiceMock(SecurityHelper.class));
         bind(OsFamily.class).toInstance(createNiceMock(OsFamily.class));
+        bind(CredentialStoreService.class).toInstance(createNiceMock(CredentialStoreService.class));
         bind(TopologyManager.class).toInstance(createNiceMock(TopologyManager.class));
         bind(AmbariMetaInfo.class).toInstance(createMock(AmbariMetaInfo.class));
         bind(Clusters.class).toInstance(createNiceMock(ClustersImpl.class));


[2/3] ambari git commit: AMBARI-13431. Blueprints Configuration to select Kerberos. (Sandor Magyari and Laszlo Puskas via rnettleton)

Posted by rn...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/6e67b5e5/ambari-server/src/main/java/org/apache/ambari/server/topology/TopologyManager.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/topology/TopologyManager.java b/ambari-server/src/main/java/org/apache/ambari/server/topology/TopologyManager.java
index 64be609..26d8c3e 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/topology/TopologyManager.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/topology/TopologyManager.java
@@ -7,7 +7,7 @@
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
  *
- *     http://www.apache.org/licenses/LICENSE-2.0
+ * http://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
@@ -18,32 +18,44 @@
 
 package org.apache.ambari.server.topology;
 
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Iterator;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Map;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
-
+import com.google.inject.Singleton;
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.actionmanager.HostRoleCommand;
 import org.apache.ambari.server.actionmanager.Request;
 import org.apache.ambari.server.controller.RequestStatusResponse;
+import org.apache.ambari.server.controller.internal.ArtifactResourceProvider;
+import org.apache.ambari.server.controller.internal.CredentialResourceProvider;
 import org.apache.ambari.server.controller.internal.ProvisionClusterRequest;
+import org.apache.ambari.server.controller.internal.RequestImpl;
 import org.apache.ambari.server.controller.internal.ScaleClusterRequest;
 import org.apache.ambari.server.controller.internal.Stack;
+import org.apache.ambari.server.controller.spi.NoSuchParentResourceException;
+import org.apache.ambari.server.controller.spi.RequestStatus;
+import org.apache.ambari.server.controller.spi.Resource;
+import org.apache.ambari.server.controller.spi.ResourceAlreadyExistsException;
+import org.apache.ambari.server.controller.spi.ResourceProvider;
+import org.apache.ambari.server.controller.spi.SystemException;
+import org.apache.ambari.server.controller.spi.UnsupportedPropertyException;
 import org.apache.ambari.server.orm.dao.HostRoleCommandStatusSummaryDTO;
 import org.apache.ambari.server.orm.entities.StageEntity;
+import org.apache.ambari.server.security.encryption.CredentialStoreService;
+import org.apache.ambari.server.state.SecurityType;
 import org.apache.ambari.server.state.host.HostImpl;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.google.inject.Singleton;
+import javax.inject.Inject;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
 
 /**
  * Manages all cluster provisioning actions on the cluster topology.
@@ -54,6 +66,7 @@ public class TopologyManager {
 
   public static final String INITIAL_CONFIG_TAG = "INITIAL";
   public static final String TOPOLOGY_RESOLVED_TAG = "TOPOLOGY_RESOLVED";
+  public static final String KDC_ADMIN_CREDENTIAL = "kdc.admin.credential";
 
   private PersistedState persistedState;
   private ExecutorService executor = Executors.newSingleThreadExecutor();
@@ -72,6 +85,14 @@ public class TopologyManager {
 
   private final Object initializationLock = new Object();
 
+
+  @Inject
+  private SecurityConfigurationFactory securityConfigurationFactory;
+
+  @Inject
+  private CredentialStoreService credentialStoreService;
+
+
   /**
    * A boolean not cached thread-local (volatile) to prevent double-checked
    * locking on the synchronized keyword.
@@ -93,6 +114,7 @@ public class TopologyManager {
           replayRequests(persistedState.getAllRequests());
           isInitialized = true;
         }
+
       }
     }
   }
@@ -100,11 +122,28 @@ public class TopologyManager {
   public RequestStatusResponse provisionCluster(ProvisionClusterRequest request) throws InvalidTopologyException, AmbariException {
     ensureInitialized();
     ClusterTopology topology = new ClusterTopologyImpl(ambariContext, request);
-    String clusterName = request.getClusterName();
+    final String clusterName = request.getClusterName();
 
     // get the id prior to creating ambari resources which increments the counter
     Long provisionId = ambariContext.getNextRequestId();
-    ambariContext.createAmbariResources(topology, clusterName);
+
+    SecurityConfiguration securityConfiguration = processSecurityConfiguration(request);
+    if (securityConfiguration != null && securityConfiguration.getType() == SecurityType.KERBEROS) {
+
+      addKerberosClient(topology);
+
+      // create Cluster resource with security_type = KERBEROS, this will trigger cluster Kerberization
+      // upon host install task execution
+      ambariContext.createAmbariResources(topology, clusterName, securityConfiguration.getType());
+      submitKerberosDescriptorAsArtifact(clusterName, securityConfiguration.getDescriptor());
+      Credential credential = request.getCredentialsMap().get(KDC_ADMIN_CREDENTIAL);
+      if (credential == null) {
+        throw new InvalidTopologyException(KDC_ADMIN_CREDENTIAL + " is missing from request.");
+      }
+      submitCredential(clusterName, credential);
+    } else {
+      ambariContext.createAmbariResources(topology, clusterName, null);
+    }
 
     long clusterId = ambariContext.getClusterId(clusterName);
     topology.setClusterId(clusterId);
@@ -112,19 +151,109 @@ public class TopologyManager {
     // persist request after it has successfully validated
     PersistedTopologyRequest persistedRequest = persistedState.persistTopologyRequest(request);
 
-
     clusterTopologyMap.put(clusterId, topology);
 
     addClusterConfigRequest(topology, new ClusterConfigurationRequest(ambariContext, topology, true));
+
+    final Stack stack = topology.getBlueprint().getStack();
+
     LogicalRequest logicalRequest = processRequest(persistedRequest, topology, provisionId);
 
     //todo: this should be invoked as part of a generic lifecycle event which could possibly
     //todo: be tied to cluster state
-    Stack stack = topology.getBlueprint().getStack();
+
     ambariContext.persistInstallStateForUI(clusterName, stack.getName(), stack.getVersion());
     return getRequestStatus(logicalRequest.getRequestId());
   }
 
+  private void submitCredential(String clusterName, Credential credential) {
+
+    ResourceProvider provider =
+        ambariContext.getClusterController().ensureResourceProvider(Resource.Type.Credential);
+
+    Map<String, Object> properties = new HashMap<>();
+    properties.put(CredentialResourceProvider.CREDENTIAL_CLUSTER_NAME_PROPERTY_ID, clusterName);
+    properties.put(CredentialResourceProvider.CREDENTIAL_ALIAS_PROPERTY_ID, KDC_ADMIN_CREDENTIAL);
+    properties.put(CredentialResourceProvider.CREDENTIAL_PRINCIPAL_PROPERTY_ID, credential.getPrincipal());
+    properties.put(CredentialResourceProvider.CREDENTIAL_KEY_PROPERTY_ID, credential.getKey());
+    properties.put(CredentialResourceProvider.CREDENTIAL_TYPE_PROPERTY_ID, credential.getType().name());
+
+    org.apache.ambari.server.controller.spi.Request request = new RequestImpl(Collections.<String>emptySet(),
+        Collections.singleton(properties), Collections.<String, String>emptyMap(), null);
+
+    try {
+      RequestStatus status = provider.createResources(request);
+      if (status.getStatus() != RequestStatus.Status.Complete) {
+        throw new RuntimeException("Failed to attach kerberos_descriptor artifact to cluster!");
+      }
+    } catch (SystemException | UnsupportedPropertyException | NoSuchParentResourceException e) {
+      throw new RuntimeException("Failed to attach kerberos_descriptor artifact to cluster: " + e);
+    } catch (ResourceAlreadyExistsException e) {
+      throw new RuntimeException("Failed to attach kerberos_descriptor artifact to cluster as resource already exists.");
+    }
+
+  }
+
+  /**
+   * Retrieve security info from Blueprint if missing from Cluster Template request.
+   *
+   * @param request
+   * @return
+   */
+  private SecurityConfiguration processSecurityConfiguration(ProvisionClusterRequest request) {
+    LOG.debug("Getting security configuration from the request ...");
+    SecurityConfiguration securityConfiguration = request.getSecurityConfiguration();
+
+    if (securityConfiguration == null) {
+      // todo - perform this logic at request creation instead!
+      LOG.debug("There's no security configuration in the request, retrieving it from the associated blueprint");
+      securityConfiguration = request.getBlueprint().getSecurity();
+      if (securityConfiguration.getType() == SecurityType.KERBEROS) {
+        securityConfiguration = securityConfigurationFactory.loadSecurityConfigurationByReference
+          (securityConfiguration.getDescriptorReference());
+      }
+    }
+    return securityConfiguration;
+  }
+
+  private void submitKerberosDescriptorAsArtifact(String clusterName, String descriptor) {
+
+    ResourceProvider artifactProvider =
+        ambariContext.getClusterController().ensureResourceProvider(Resource.Type.Artifact);
+
+    Map<String, Object> properties = new HashMap<>();
+    properties.put(ArtifactResourceProvider.ARTIFACT_NAME_PROPERTY, "kerberos_descriptor");
+    properties.put("Artifacts/cluster_name", clusterName);
+
+    Map<String, String> requestInfoProps = new HashMap<>();
+    requestInfoProps.put(org.apache.ambari.server.controller.spi.Request.REQUEST_INFO_BODY_PROPERTY,
+        "{\"" + ArtifactResourceProvider.ARTIFACT_DATA_PROPERTY + "\": " + descriptor + "}");
+
+    org.apache.ambari.server.controller.spi.Request request = new RequestImpl(Collections.<String>emptySet(),
+        Collections.singleton(properties), requestInfoProps, null);
+
+    try {
+      RequestStatus status = artifactProvider.createResources(request);
+      try {
+        while (status.getStatus() != RequestStatus.Status.Complete) {
+          LOG.info("Waiting for kerberos_descriptor artifact creation.");
+          Thread.sleep(100);
+        }
+      } catch (InterruptedException e) {
+        LOG.info("Wait for resource creation interrupted!");
+      }
+
+      if (status.getStatus() != RequestStatus.Status.Complete) {
+        throw new RuntimeException("Failed to attach kerberos_descriptor artifact to cluster!");
+      }
+    } catch (SystemException | UnsupportedPropertyException | NoSuchParentResourceException e) {
+      throw new RuntimeException("Failed to attach kerberos_descriptor artifact to cluster: " + e);
+    } catch (ResourceAlreadyExistsException e) {
+      throw new RuntimeException("Failed to attach kerberos_descriptor artifact to cluster as resource already exists.");
+    }
+
+  }
+
   public RequestStatusResponse scaleHosts(ScaleClusterRequest request)
       throws InvalidTopologyException, AmbariException {
 
@@ -135,10 +264,10 @@ public class TopologyManager {
     ClusterTopology topology = clusterTopologyMap.get(clusterId);
     if (topology == null) {
       throw new InvalidTopologyException("Unable to retrieve cluster topology for cluster. This is most likely a " +
-                                         "result of trying to scale a cluster via the API which was created using " +
-                                         "the Ambari UI. At this time only clusters created via the API using a " +
-                                         "blueprint can be scaled with this API.  If the cluster was originally created " +
-                                         "via the API as described above, please file a Jira for this matter.");
+          "result of trying to scale a cluster via the API which was created using " +
+          "the Ambari UI. At this time only clusters created via the API using a " +
+          "blueprint can be scaled with this API.  If the cluster was originally created " +
+          "via the API as described above, please file a Jira for this matter.");
     }
     request.setClusterId(clusterId);
     PersistedTopologyRequest persistedRequest = persistedState.persistTopologyRequest(request);
@@ -161,7 +290,7 @@ public class TopologyManager {
     // The lock ordering in this method must always be the same ordering as TopologyManager.processRequest
     // TODO: Locking strategies for TopologyManager should be reviewed and possibly rewritten in a future release
     synchronized (availableHosts) {
-      synchronized(reservedHosts) {
+      synchronized (reservedHosts) {
         if (reservedHosts.containsKey(hostName)) {
           LogicalRequest request = reservedHosts.remove(hostName);
           HostOfferResponse response = request.offer(host);
@@ -329,30 +458,30 @@ public class TopologyManager {
 
     // The lock ordering in this method must always be the same ordering as TopologyManager.onHostRegistered
     // TODO: Locking strategies for TopologyManager should be reviewed and possibly rewritten in a future release
-    synchronized(availableHosts) {
+    synchronized (availableHosts) {
       Iterator<HostImpl> hostIterator = availableHosts.iterator();
-      while (! requestHostComplete && hostIterator.hasNext()) {
+      while (!requestHostComplete && hostIterator.hasNext()) {
         HostImpl host = hostIterator.next();
         synchronized (reservedHosts) {
           String hostname = host.getHostName();
-          if (reservedHosts.containsKey(hostname))  {
+          if (reservedHosts.containsKey(hostname)) {
             if (logicalRequest.equals(reservedHosts.get(hostname))) {
               // host is registered to this request, remove it from reserved map
               LOG.info("TopologyManager.processRequest: host name = {} is mapped to LogicalRequest ID = {} and will be removed from the reserved hosts.",
-                hostname, logicalRequest.getRequestId());
+                  hostname, logicalRequest.getRequestId());
               reservedHosts.remove(hostname);
             } else {
               // host is registered with another request, don't offer
               //todo: clean up logic
               LOG.info("TopologyManager.processRequest: host name = {} is registered with another request, and will not be offered to LogicalRequest ID = {}",
-                hostname, logicalRequest.getRequestId());
+                  hostname, logicalRequest.getRequestId());
               continue;
             }
           }
         }
 
         LOG.info("TopologyManager.processRequest: offering host name = {} to LogicalRequest ID = {}",
-          host.getHostName(), logicalRequest.getRequestId());
+            host.getHostName(), logicalRequest.getRequestId());
         HostOfferResponse response = logicalRequest.offer(host);
         switch (response.getAnswer()) {
           case ACCEPTED:
@@ -362,25 +491,25 @@ public class TopologyManager {
             //todo: may affect this behavior??
             hostIterator.remove();
             LOG.info("TopologyManager.processRequest: host name = {} was ACCEPTED by LogicalRequest ID = {} , host has been removed from available hosts.",
-              host.getHostName(), logicalRequest.getRequestId());
+                host.getHostName(), logicalRequest.getRequestId());
             processAcceptedHostOffer(getClusterTopology(logicalRequest.getClusterId()), response, host);
             break;
           case DECLINED_DONE:
             requestHostComplete = true;
             LOG.info("TopologyManager.processRequest: host name = {} was DECLINED_DONE by LogicalRequest ID = {}",
-              host.getHostName(), logicalRequest.getRequestId());
+                host.getHostName(), logicalRequest.getRequestId());
             break;
           case DECLINED_PREDICATE:
             LOG.info("TopologyManager.processRequest: host name = {} was DECLINED_PREDICATE by LogicalRequest ID = {}",
-              host.getHostName(), logicalRequest.getRequestId());
+                host.getHostName(), logicalRequest.getRequestId());
             break;
         }
       }
 
-      if (! requestHostComplete) {
+      if (!requestHostComplete) {
         // not all required hosts have been matched (see earlier comment regarding outstanding logical requests)
         LOG.info("TopologyManager.processRequest: not all required hosts have been matched, so adding LogicalRequest ID = {} to outstanding requests",
-          logicalRequest.getRequestId());
+            logicalRequest.getRequestId());
         synchronized (outstandingRequests) {
           outstandingRequests.add(logicalRequest);
         }
@@ -399,7 +528,7 @@ public class TopologyManager {
 
     allRequests.put(logicalRequest.getRequestId(), logicalRequest);
     LOG.info("TopologyManager.createLogicalRequest: created LogicalRequest with ID = {} and completed persistence of this request.",
-      logicalRequest.getRequestId());
+        logicalRequest.getRequestId());
     synchronized (reservedHosts) {
       for (String host : logicalRequest.getReservedHosts()) {
         reservedHosts.put(host, logicalRequest);
@@ -424,11 +553,11 @@ public class TopologyManager {
     persistedState.registerHostName(response.getHostRequestId(), hostName);
 
     LOG.info("TopologyManager.processAcceptedHostOffer: about to execute tasks for host = {}",
-      hostName);
+        hostName);
 
     for (TopologyTask task : response.getTasks()) {
       LOG.info("Processing accepted host offer for {} which responded {} and task {}",
-        hostName, response.getAnswer(), task.getType());
+          hostName, response.getAnswer(), task.getType());
 
       task.init(topology, ambariContext);
       executor.execute(task);
@@ -444,7 +573,7 @@ public class TopologyManager {
 
       for (LogicalRequest logicalRequest : requestEntry.getValue()) {
         allRequests.put(logicalRequest.getRequestId(), logicalRequest);
-        if (! logicalRequest.hasCompleted()) {
+        if (!logicalRequest.hasCompleted()) {
           outstandingRequests.add(logicalRequest);
           for (String reservedHost : logicalRequest.getReservedHosts()) {
             reservedHosts.put(reservedHost, logicalRequest);
@@ -466,9 +595,9 @@ public class TopologyManager {
         }
       }
 
-      if (! configChecked) {
+      if (!configChecked) {
         configChecked = true;
-        if (! ambariContext.doesConfigurationWithTagExist(topology.getClusterId(), TOPOLOGY_RESOLVED_TAG)) {
+        if (!ambariContext.doesConfigurationWithTagExist(topology.getClusterId(), TOPOLOGY_RESOLVED_TAG)) {
           LOG.info("TopologyManager.replayRequests: no config with TOPOLOGY_RESOLVED found, adding cluster config request");
           addClusterConfigRequest(topology, new ClusterConfigurationRequest(ambariContext, topology, false));
         }
@@ -478,7 +607,6 @@ public class TopologyManager {
 
   //todo: this should invoke a callback on each 'service' in the topology
   private void finalizeTopology(TopologyRequest request, ClusterTopology topology) {
-    addKerberosClientIfNecessary(topology);
   }
 
   private boolean isHostIgnored(String host) {
@@ -490,11 +618,9 @@ public class TopologyManager {
    *
    * @param topology  cluster topology
    */
-  private void addKerberosClientIfNecessary(ClusterTopology topology) {
-    if (topology.isClusterKerberosEnabled()) {
-      for (HostGroup group : topology.getBlueprint().getHostGroups().values()) {
-        group.addComponent("KERBEROS_CLIENT");
-      }
+  private void addKerberosClient(ClusterTopology topology) {
+    for (HostGroup group : topology.getBlueprint().getHostGroups().values()) {
+      group.addComponent("KERBEROS_CLIENT");
     }
   }
 
@@ -529,7 +655,7 @@ public class TopologyManager {
       boolean interrupted = false;
 
       Collection<String> requiredHostGroups = getTopologyRequiredHostGroups();
-      while (! completed && ! interrupted) {
+      while (!completed && !interrupted) {
         try {
           Thread.sleep(100);
         } catch (InterruptedException e) {
@@ -543,7 +669,7 @@ public class TopologyManager {
 
       LOG.info("TopologyManager.ConfigureClusterTask: All Required host groups are completed, Cluster Configuration can now begin");
 
-      if (! interrupted) {
+      if (!interrupted) {
         try {
           LOG.info("TopologyManager.ConfigureClusterTask: Setting Configuration on cluster");
           // sets updated configuration on topology and cluster
@@ -591,12 +717,12 @@ public class TopologyManager {
           configTopologyResolved = false;
           if (groupInfo != null) {
             LOG.info("TopologyManager.ConfigureClusterTask areHostGroupsResolved: host group name = {} requires {} hosts to be mapped, but only {} are available.",
-              groupInfo.getHostGroupName(), groupInfo.getRequestedHostCount(), groupInfo.getHostNames().size());
+                groupInfo.getHostGroupName(), groupInfo.getRequestedHostCount(), groupInfo.getHostNames().size());
           }
           break;
         } else {
           LOG.info("TopologyManager.ConfigureClusterTask areHostGroupsResolved: host group name = {} has been fully resolved, as all {} required hosts are mapped to {} physical hosts.",
-            groupInfo.getHostGroupName(), groupInfo.getRequestedHostCount(), groupInfo.getHostNames().size());
+              groupInfo.getHostGroupName(), groupInfo.getRequestedHostCount(), groupInfo.getHostNames().size());
         }
       }
       return configTopologyResolved;

http://git-wip-us.apache.org/repos/asf/ambari/blob/6e67b5e5/ambari-server/src/main/java/org/apache/ambari/server/topology/TopologyRequestFactory.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/topology/TopologyRequestFactory.java b/ambari-server/src/main/java/org/apache/ambari/server/topology/TopologyRequestFactory.java
index da0c9e1..49009ef 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/topology/TopologyRequestFactory.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/topology/TopologyRequestFactory.java
@@ -27,6 +27,7 @@ import java.util.Map;
  * Factory for creating topology requests.
  */
 public interface TopologyRequestFactory {
-  public ProvisionClusterRequest createProvisionClusterRequest(Map<String, Object> properties) throws InvalidTopologyTemplateException;
+
+  public ProvisionClusterRequest createProvisionClusterRequest(Map<String, Object> properties, SecurityConfiguration securityConfiguration) throws InvalidTopologyTemplateException;
   // todo: use to create other request types
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/6e67b5e5/ambari-server/src/main/java/org/apache/ambari/server/topology/TopologyRequestFactoryImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/topology/TopologyRequestFactoryImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/topology/TopologyRequestFactoryImpl.java
index 8b23b44..71878a2 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/topology/TopologyRequestFactoryImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/topology/TopologyRequestFactoryImpl.java
@@ -28,7 +28,8 @@ import java.util.Map;
  */
 public class TopologyRequestFactoryImpl implements TopologyRequestFactory {
   @Override
-  public ProvisionClusterRequest createProvisionClusterRequest(Map<String, Object> properties) throws InvalidTopologyTemplateException {
-    return new ProvisionClusterRequest(properties);
+  public ProvisionClusterRequest createProvisionClusterRequest(Map<String, Object> properties, SecurityConfiguration securityConfiguration) throws InvalidTopologyTemplateException {
+    return new ProvisionClusterRequest(properties, securityConfiguration);
+
   }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/6e67b5e5/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog213.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog213.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog213.java
index a3d3c53..9d06595 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog213.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog213.java
@@ -113,6 +113,10 @@ public class UpgradeCatalog213 extends AbstractUpgradeCatalog {
   private static final String KERBEROS_DESCRIPTOR_NAME_COLUMN = "kerberos_descriptor_name";
   private static final String KERBEROS_DESCRIPTOR_COLUMN = "kerberos_descriptor";
 
+  private static final String BLUEPRINT_TABLE = "blueprint";
+  private static final String SECURITY_TYPE_COLUMN = "security_type";
+  private static final String SECURITY_DESCRIPTOR_REF_COLUMN = "security_descriptor_reference";
+
   /**
    * Logger.
    */
@@ -171,6 +175,7 @@ public class UpgradeCatalog213 extends AbstractUpgradeCatalog {
     dbAccessor.alterColumn(HOST_ROLE_COMMAND_TABLE, new DBColumnInfo(HOST_ID_COL, Long.class, null, null, true));
 
     addKerberosDescriptorTable();
+    executeBlueprintDDLUpdates();
   }
 
   protected void executeUpgradeDDLUpdates() throws AmbariException, SQLException {
@@ -186,9 +191,16 @@ public class UpgradeCatalog213 extends AbstractUpgradeCatalog {
     dbAccessor.createTable(KERBEROS_DESCRIPTOR_TABLE, columns, KERBEROS_DESCRIPTOR_NAME_COLUMN);
   }
 
-  /**
-   * {@inheritDoc}
-   */
+  private void executeBlueprintDDLUpdates() throws AmbariException, SQLException {
+    dbAccessor.addColumn(BLUEPRINT_TABLE, new DBAccessor.DBColumnInfo(SECURITY_TYPE_COLUMN,
+      String.class, 32, "NONE", false));
+    dbAccessor.addColumn(BLUEPRINT_TABLE, new DBAccessor.DBColumnInfo(SECURITY_DESCRIPTOR_REF_COLUMN,
+      String.class, null, null, true));
+  }
+
+    /**
+     * {@inheritDoc}
+     */
   @Override
   protected void executePreDMLUpdates() throws AmbariException, SQLException {
     // execute DDL updates

http://git-wip-us.apache.org/repos/asf/ambari/blob/6e67b5e5/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
index 9330310..62d8054 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
@@ -397,6 +397,8 @@ CREATE TABLE requestschedulebatchrequest (
 CREATE TABLE blueprint (
   blueprint_name VARCHAR(255) NOT NULL,
   stack_id BIGINT NOT NULL,
+  security_type VARCHAR(32) NOT NULL DEFAULT 'NONE',
+  security_descriptor_reference VARCHAR(255),
   PRIMARY KEY(blueprint_name)
 );
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/6e67b5e5/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
index 8b36f9e..fe024ce 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
@@ -388,6 +388,8 @@ CREATE TABLE requestschedulebatchrequest (
 CREATE TABLE blueprint (
   blueprint_name VARCHAR2(255) NOT NULL,
   stack_id NUMBER(19) NOT NULL,
+  security_type VARCHAR2(32) DEFAULT 'NONE' NOT NULL,
+  security_descriptor_reference VARCHAR(255),
   PRIMARY KEY(blueprint_name)
 );
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/6e67b5e5/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
index 764396e..3ae65ee 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
@@ -389,6 +389,8 @@ CREATE TABLE requestschedulebatchrequest (
 
 CREATE TABLE blueprint (
   blueprint_name VARCHAR(255) NOT NULL,
+  security_type VARCHAR(32) NOT NULL DEFAULT 'NONE',
+  security_descriptor_reference VARCHAR(255),
   stack_id BIGINT NOT NULL,
   PRIMARY KEY(blueprint_name)
 );

http://git-wip-us.apache.org/repos/asf/ambari/blob/6e67b5e5/ambari-server/src/main/resources/Ambari-DDL-Postgres-EMBEDDED-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-Postgres-EMBEDDED-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-Postgres-EMBEDDED-CREATE.sql
index dbca53e..c014443 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-Postgres-EMBEDDED-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-Postgres-EMBEDDED-CREATE.sql
@@ -437,6 +437,8 @@ GRANT ALL PRIVILEGES ON TABLE ambari.requestschedulebatchrequest TO :username;
 CREATE TABLE ambari.blueprint (
   blueprint_name VARCHAR(255) NOT NULL,
   stack_id BIGINT NOT NULL,
+  security_type VARCHAR(32) NOT NULL DEFAULT 'NONE',
+  security_descriptor_reference VARCHAR(255),
   PRIMARY KEY(blueprint_name)
 );
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/6e67b5e5/ambari-server/src/main/resources/Ambari-DDL-SQLAnywhere-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-SQLAnywhere-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-SQLAnywhere-CREATE.sql
index 2f93825..bba17a5 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-SQLAnywhere-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-SQLAnywhere-CREATE.sql
@@ -386,6 +386,8 @@ CREATE TABLE requestschedulebatchrequest (
 CREATE TABLE blueprint (
   blueprint_name VARCHAR(255) NOT NULL,
   stack_id NUMERIC(19) NOT NULL,
+  security_type VARCHAR(32) NOT NULL DEFAULT 'NONE',
+  security_descriptor_reference VARCHAR(255),
   PRIMARY KEY(blueprint_name)
 );
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/6e67b5e5/ambari-server/src/main/resources/Ambari-DDL-SQLServer-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-SQLServer-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-SQLServer-CREATE.sql
index ef90205..60938c3 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-SQLServer-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-SQLServer-CREATE.sql
@@ -444,6 +444,8 @@ CREATE TABLE requestschedulebatchrequest (
 CREATE TABLE blueprint (
   blueprint_name VARCHAR(255) NOT NULL,
   stack_id BIGINT NOT NULL,
+  security_type VARCHAR(32) NOT NULL DEFAULT 'NONE',
+  security_descriptor_reference VARCHAR(255),
   PRIMARY KEY CLUSTERED (blueprint_name)
   );
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/6e67b5e5/ambari-server/src/main/resources/properties.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/properties.json b/ambari-server/src/main/resources/properties.json
index e63bee4..a9c85cc 100644
--- a/ambari-server/src/main/resources/properties.json
+++ b/ambari-server/src/main/resources/properties.json
@@ -347,6 +347,7 @@
         "Blueprints/blueprint_name",
         "Blueprints/stack_name",
         "Blueprints/stack_version",
+        "Blueprints/security",
         "host_groups",
         "host_groups/components",
         "host_groups/cardinality",

http://git-wip-us.apache.org/repos/asf/ambari/blob/6e67b5e5/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/AlertResourceProviderTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/AlertResourceProviderTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/AlertResourceProviderTest.java
index 71226a1..4f0263b 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/AlertResourceProviderTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/AlertResourceProviderTest.java
@@ -17,27 +17,11 @@
  */
 package org.apache.ambari.server.controller.internal;
 
-import static org.apache.ambari.server.configuration.Configuration.JDBC_IN_MEMORY_URL;
-import static org.apache.ambari.server.configuration.Configuration.JDBC_IN_MEMROY_DRIVER;
-import static org.easymock.EasyMock.capture;
-import static org.easymock.EasyMock.createMock;
-import static org.easymock.EasyMock.expect;
-import static org.easymock.EasyMock.replay;
-import static org.easymock.EasyMock.verify;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
-
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Set;
-import java.util.concurrent.atomic.AtomicInteger;
-import java.util.concurrent.atomic.AtomicLong;
-
-import javax.persistence.EntityManager;
-
+import com.google.inject.Binder;
+import com.google.inject.Guice;
+import com.google.inject.Injector;
+import com.google.inject.Module;
+import com.google.inject.util.Modules;
 import org.apache.ambari.server.api.query.render.AlertStateSummary;
 import org.apache.ambari.server.api.query.render.AlertSummaryGroupedRenderer;
 import org.apache.ambari.server.api.query.render.AlertSummaryGroupedRenderer.AlertDefinitionSummary;
@@ -73,11 +57,26 @@ import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
 
-import com.google.inject.Binder;
-import com.google.inject.Guice;
-import com.google.inject.Injector;
-import com.google.inject.Module;
-import com.google.inject.util.Modules;
+import javax.persistence.EntityManager;
+import java.io.File;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.concurrent.atomic.AtomicLong;
+
+import static org.apache.ambari.server.configuration.Configuration.JDBC_IN_MEMORY_URL;
+import static org.apache.ambari.server.configuration.Configuration.JDBC_IN_MEMROY_DRIVER;
+import static org.easymock.EasyMock.capture;
+import static org.easymock.EasyMock.createMock;
+import static org.easymock.EasyMock.expect;
+import static org.easymock.EasyMock.replay;
+import static org.easymock.EasyMock.verify;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
 
 /**
  * Test the AlertResourceProvider class
@@ -577,6 +576,9 @@ public class AlertResourceProviderTest {
       expect(configuration.getDatabaseUser()).andReturn("test").anyTimes();
       expect(configuration.getDatabasePassword()).andReturn("test").anyTimes();
       expect(configuration.getAlertEventPublisherPoolSize()).andReturn(Integer.valueOf(Configuration.ALERTS_EXECUTION_SCHEDULER_THREADS_DEFAULT)).anyTimes();
+      expect(configuration.getMasterKeyLocation()).andReturn(new File("/test")).anyTimes();
+      expect(configuration.getTemporaryKeyStoreRetentionMinutes()).andReturn(2l).anyTimes();
+      expect(configuration.isActivelyPurgeTemporaryKeyStore()).andReturn(true).anyTimes();
       replay(configuration);
     }
   }

http://git-wip-us.apache.org/repos/asf/ambari/blob/6e67b5e5/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/BlueprintResourceProviderTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/BlueprintResourceProviderTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/BlueprintResourceProviderTest.java
index 5bfdebb..369bf02 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/BlueprintResourceProviderTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/BlueprintResourceProviderTest.java
@@ -18,32 +18,7 @@
 
 package org.apache.ambari.server.controller.internal;
 
-import static org.easymock.EasyMock.anyObject;
-import static org.easymock.EasyMock.createMock;
-import static org.easymock.EasyMock.createNiceMock;
-import static org.easymock.EasyMock.createStrictMock;
-import static org.easymock.EasyMock.expect;
-import static org.easymock.EasyMock.expectLastCall;
-import static org.easymock.EasyMock.replay;
-import static org.easymock.EasyMock.reset;
-import static org.easymock.EasyMock.verify;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertNull;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
-
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
+import com.google.gson.Gson;
 import org.apache.ambari.server.api.services.AmbariMetaInfo;
 import org.apache.ambari.server.controller.AmbariManagementController;
 import org.apache.ambari.server.controller.internal.BlueprintResourceProvider.BlueprintConfigPopulationStrategy;
@@ -55,10 +30,10 @@ import org.apache.ambari.server.controller.spi.NoSuchResourceException;
 import org.apache.ambari.server.controller.spi.Predicate;
 import org.apache.ambari.server.controller.spi.Request;
 import org.apache.ambari.server.controller.spi.Resource;
+import org.apache.ambari.server.controller.spi.ResourceAlreadyExistsException;
 import org.apache.ambari.server.controller.spi.ResourceProvider;
 import org.apache.ambari.server.controller.spi.SystemException;
 import org.apache.ambari.server.controller.spi.UnsupportedPropertyException;
-import org.apache.ambari.server.controller.spi.ResourceAlreadyExistsException;
 import org.apache.ambari.server.controller.utilities.PropertyHelper;
 import org.apache.ambari.server.orm.dao.BlueprintDAO;
 import org.apache.ambari.server.orm.dao.StackDAO;
@@ -69,17 +44,45 @@ import org.apache.ambari.server.orm.entities.HostGroupComponentEntity;
 import org.apache.ambari.server.orm.entities.HostGroupConfigEntity;
 import org.apache.ambari.server.orm.entities.HostGroupEntity;
 import org.apache.ambari.server.orm.entities.StackEntity;
-import org.apache.ambari.server.utils.StageUtils;
+import org.apache.ambari.server.state.SecurityType;
 import org.apache.ambari.server.topology.Blueprint;
 import org.apache.ambari.server.topology.BlueprintFactory;
 import org.apache.ambari.server.topology.InvalidTopologyException;
+import org.apache.ambari.server.topology.SecurityConfiguration;
+import org.apache.ambari.server.topology.SecurityConfigurationFactory;
+import org.apache.ambari.server.utils.StageUtils;
 import org.junit.Before;
 import org.junit.BeforeClass;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.rules.ExpectedException;
 
-import com.google.gson.Gson;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import static org.easymock.EasyMock.anyBoolean;
+import static org.easymock.EasyMock.anyObject;
+import static org.easymock.EasyMock.createMock;
+import static org.easymock.EasyMock.createNiceMock;
+import static org.easymock.EasyMock.createStrictMock;
+import static org.easymock.EasyMock.expect;
+import static org.easymock.EasyMock.expectLastCall;
+import static org.easymock.EasyMock.replay;
+import static org.easymock.EasyMock.reset;
+import static org.easymock.EasyMock.verify;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
 
 /**
  * BlueprintResourceProvider unit tests.
@@ -98,13 +101,13 @@ public class BlueprintResourceProviderTest {
   private final static Blueprint blueprint = createMock(Blueprint.class);
   private final static AmbariMetaInfo metaInfo = createMock(AmbariMetaInfo.class);
   private final static BlueprintFactory blueprintFactory = createMock(BlueprintFactory.class);
+  private final static SecurityConfigurationFactory securityFactory = createMock(SecurityConfigurationFactory.class);
   private final static BlueprintResourceProvider provider = createProvider();
   private final static Gson gson = new Gson();
 
-
   @BeforeClass
   public static void initClass() {
-    BlueprintResourceProvider.init(blueprintFactory, dao, gson);
+    BlueprintResourceProvider.init(blueprintFactory, dao, securityFactory, gson);
 
     StackEntity stackEntity = new StackEntity();
     stackEntity.setStackName("test-stack-name");
@@ -112,15 +115,14 @@ public class BlueprintResourceProviderTest {
 
     expect(
         stackDAO.find(anyObject(String.class),
-            anyObject(String.class))).andReturn(stackEntity).anyTimes();
-
+          anyObject(String.class))).andReturn(stackEntity).anyTimes();
     replay(stackDAO);
 
   }
 
   @Before
   public void resetGlobalMocks() {
-    reset(dao, metaInfo, blueprintFactory, blueprint, entity);
+    reset(dao, metaInfo, blueprintFactory, securityFactory, blueprint, entity);
   }
 
   @Test
@@ -133,7 +135,8 @@ public class BlueprintResourceProviderTest {
     Map<String, String> requestInfoProperties = getTestRequestInfoProperties();
 
     // set expectations
-    expect(blueprintFactory.createBlueprint(setProperties.iterator().next())).andReturn(blueprint).once();
+    expect(blueprintFactory.createBlueprint(setProperties.iterator().next(), null)).andReturn(blueprint).once();
+    expect(securityFactory.createSecurityConfigurationFromRequest(null, true)).andReturn(null).anyTimes();
     blueprint.validateRequiredProperties();
     blueprint.validateTopology();
     expect(blueprint.toEntity()).andReturn(entity);
@@ -143,7 +146,7 @@ public class BlueprintResourceProviderTest {
     expect(dao.findByName(BLUEPRINT_NAME)).andReturn(null);
     dao.create(entity);
 
-    replay(dao, entity, metaInfo, blueprintFactory, blueprint, request, managementController);
+    replay(dao, entity, metaInfo, blueprintFactory, securityFactory, blueprint, request, managementController);
     // end expectations
 
     ResourceProvider provider = AbstractControllerResourceProvider.getResourceProvider(
@@ -164,7 +167,7 @@ public class BlueprintResourceProviderTest {
     assertEquals(request, lastEvent.getRequest());
     assertNull(lastEvent.getPredicate());
 
-    verify(dao, entity, blueprintFactory, metaInfo, request, managementController);
+    verify(dao, entity, blueprintFactory, securityFactory, metaInfo, request, managementController);
   }
 
   @Test()
@@ -211,7 +214,7 @@ public class BlueprintResourceProviderTest {
     requestInfoProperties.put("validate_topology", "false");
 
     // set expectations
-    expect(blueprintFactory.createBlueprint(setProperties.iterator().next())).andReturn(blueprint).once();
+    expect(blueprintFactory.createBlueprint(setProperties.iterator().next(), null)).andReturn(blueprint).once();
     blueprint.validateRequiredProperties();
     expect(blueprint.toEntity()).andReturn(entity);
     expect(blueprint.getName()).andReturn(BLUEPRINT_NAME).atLeastOnce();
@@ -252,7 +255,7 @@ public class BlueprintResourceProviderTest {
     Map<String, String> requestInfoProperties = getTestRequestInfoProperties();
 
     // set expectations
-    expect(blueprintFactory.createBlueprint(setProperties.iterator().next())).andReturn(blueprint).once();
+    expect(blueprintFactory.createBlueprint(setProperties.iterator().next(), null)).andReturn(blueprint).once();
     blueprint.validateRequiredProperties();
     expect(blueprint.getName()).andReturn(BLUEPRINT_NAME).atLeastOnce();
     blueprint.validateTopology();
@@ -295,7 +298,7 @@ public class BlueprintResourceProviderTest {
     Request request = createMock(Request.class);
 
     // set expectations
-    expect(blueprintFactory.createBlueprint(setProperties.iterator().next())).andReturn(blueprint).once();
+    expect(blueprintFactory.createBlueprint(setProperties.iterator().next(), null)).andReturn(blueprint).once();
     blueprint.validateRequiredProperties();
     blueprint.validateTopology();
     expect(blueprint.toEntity()).andReturn(entity);
@@ -340,12 +343,13 @@ public class BlueprintResourceProviderTest {
     Map<String, String> requestInfoProperties = getTestRequestInfoProperties();
 
     // set expectations
-    expect(blueprintFactory.createBlueprint(setProperties.iterator().next())).andThrow(
-        new IllegalArgumentException("Blueprint name must be provided"));
+    expect(blueprintFactory.createBlueprint(setProperties.iterator().next(), null)).andThrow(
+      new IllegalArgumentException("Blueprint name must be provided"));
+    expect(securityFactory.createSecurityConfigurationFromRequest(null,true)).andReturn(null).anyTimes();
     expect(request.getProperties()).andReturn(setProperties);
     expect(request.getRequestInfoProperties()).andReturn(requestInfoProperties);
 
-    replay(dao, entity, metaInfo, blueprintFactory, blueprint, request);
+    replay(dao, entity, metaInfo, blueprintFactory, securityFactory, blueprint, request);
     // end expectations
 
     try {
@@ -357,6 +361,51 @@ public class BlueprintResourceProviderTest {
     verify(dao, entity, blueprintFactory, metaInfo, request);
   }
 
+  @Test
+  public void testCreateResources_withSecurityConfiguration() throws Exception {
+    AmbariManagementController managementController = createMock(AmbariManagementController.class);
+    Request request = createMock(Request.class);
+
+    Set<Map<String, Object>> setProperties = getBlueprintTestProperties();
+    Map<String, String> requestInfoProperties = getTestRequestInfoProperties();
+    SecurityConfiguration securityConfiguration = new SecurityConfiguration(SecurityType.KERBEROS, "testRef", null);
+
+    // set expectations
+    expect(securityFactory.createSecurityConfigurationFromRequest(anyObject(HashMap.class), anyBoolean())).andReturn
+      (securityConfiguration).once();
+    expect(blueprintFactory.createBlueprint(setProperties.iterator().next(), securityConfiguration)).andReturn(blueprint).once();
+    blueprint.validateRequiredProperties();
+    blueprint.validateTopology();
+    expect(blueprint.toEntity()).andReturn(entity);
+    expect(blueprint.getName()).andReturn(BLUEPRINT_NAME).atLeastOnce();
+    expect(request.getProperties()).andReturn(setProperties);
+    expect(request.getRequestInfoProperties()).andReturn(requestInfoProperties);
+    expect(dao.findByName(BLUEPRINT_NAME)).andReturn(null);
+    dao.create(entity);
+
+    replay(dao, entity, metaInfo, blueprintFactory, securityFactory, blueprint, request, managementController);
+    // end expectations
+
+    ResourceProvider provider = AbstractControllerResourceProvider.getResourceProvider(
+      Resource.Type.Blueprint,
+      PropertyHelper.getPropertyIds(Resource.Type.Blueprint),
+      PropertyHelper.getKeyPropertyIds(Resource.Type.Blueprint),
+      managementController);
+
+    AbstractResourceProviderTest.TestObserver observer = new AbstractResourceProviderTest.TestObserver();
+    ((ObservableResourceProvider)provider).addObserver(observer);
+
+    provider.createResources(request);
+
+    ResourceProviderEvent lastEvent = observer.getLastEvent();
+    assertNotNull(lastEvent);
+    assertEquals(Resource.Type.Blueprint, lastEvent.getResourceType());
+    assertEquals(ResourceProviderEvent.Type.Create, lastEvent.getType());
+    assertEquals(request, lastEvent.getRequest());
+    assertNull(lastEvent.getPredicate());
+
+    verify(dao, entity, blueprintFactory, metaInfo, request, managementController);
+  }
 
   @Test
   public void testGetResourcesNoPredicate() throws SystemException, UnsupportedPropertyException,
@@ -443,7 +492,7 @@ public class BlueprintResourceProviderTest {
     Request request = createMock(Request.class);
 
     // set expectations
-    expect(blueprintFactory.createBlueprint(setProperties.iterator().next())).andReturn(blueprint).once();
+    expect(blueprintFactory.createBlueprint(setProperties.iterator().next(), null)).andReturn(blueprint).once();
     blueprint.validateRequiredProperties();
     blueprint.validateTopology();
     expect(blueprint.toEntity()).andReturn(entity);
@@ -488,7 +537,7 @@ public class BlueprintResourceProviderTest {
     Request request = createMock(Request.class);
 
     // set expectations
-    expect(blueprintFactory.createBlueprint(setProperties.iterator().next())).andReturn(blueprint).once();
+    expect(blueprintFactory.createBlueprint(setProperties.iterator().next(), null)).andReturn(blueprint).once();
     blueprint.validateRequiredProperties();
     blueprint.validateTopology();
     expect(blueprint.toEntity()).andReturn(entity);
@@ -648,6 +697,10 @@ public class BlueprintResourceProviderTest {
     return Collections.singleton(mapProperties);
   }
 
+  public static Map<String, Object> getBlueprintRawBodyProperties() {
+    return new HashMap<String, Object>();
+  }
+
   public static void setConfigurationProperties(Set<Map<String, Object>> properties ) {
     Map<String, String> clusterProperties = new HashMap<String, String>();
     clusterProperties.put("core-site/properties/fs.trash.interval", "480");
@@ -733,6 +786,7 @@ public class BlueprintResourceProviderTest {
       assertEquals(1, finalAttrs.size());
       assertEquals("true", finalAttrs.get("ipc.client.idlethreshold"));
     }
+
   }
 
   private static BlueprintResourceProvider createProvider() {
@@ -794,7 +848,6 @@ public class BlueprintResourceProviderTest {
     return setPropertiesInfo;
   }
 
-
   @Test
   public void testPopulateConfigurationEntity_oldSchema() throws Exception {
     Map<String, String> configuration = new HashMap<String, String>();

http://git-wip-us.apache.org/repos/asf/ambari/blob/6e67b5e5/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ClusterResourceProviderTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ClusterResourceProviderTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ClusterResourceProviderTest.java
index 0b2e905..f885a5b 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ClusterResourceProviderTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ClusterResourceProviderTest.java
@@ -18,27 +18,7 @@
 
 package org.apache.ambari.server.controller.internal;
 
-import static org.easymock.EasyMock.anyObject;
-import static org.easymock.EasyMock.capture;
-import static org.easymock.EasyMock.createMock;
-import static org.easymock.EasyMock.createNiceMock;
-import static org.easymock.EasyMock.createStrictMock;
-import static org.easymock.EasyMock.eq;
-import static org.easymock.EasyMock.expect;
-import static org.easymock.EasyMock.replay;
-import static org.easymock.EasyMock.reset;
-import static org.easymock.EasyMock.verify;
-import static org.junit.Assert.assertEquals;
-
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.LinkedHashMap;
-import java.util.LinkedHashSet;
-import java.util.Map;
-import java.util.Set;
+import com.google.gson.Gson;
 import org.apache.ambari.server.controller.AmbariManagementController;
 import org.apache.ambari.server.controller.ClusterRequest;
 import org.apache.ambari.server.controller.ClusterResponse;
@@ -56,8 +36,9 @@ import org.apache.ambari.server.state.State;
 import org.apache.ambari.server.topology.Blueprint;
 import org.apache.ambari.server.topology.BlueprintFactory;
 import org.apache.ambari.server.topology.InvalidTopologyException;
+import org.apache.ambari.server.topology.SecurityConfiguration;
+import org.apache.ambari.server.topology.SecurityConfigurationFactory;
 import org.apache.ambari.server.topology.TopologyManager;
-import org.apache.ambari.server.topology.TopologyRequest;
 import org.apache.ambari.server.topology.TopologyRequestFactory;
 import org.easymock.Capture;
 import org.easymock.EasyMock;
@@ -66,6 +47,29 @@ import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
 
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.LinkedHashMap;
+import java.util.LinkedHashSet;
+import java.util.Map;
+import java.util.Set;
+
+import static org.easymock.EasyMock.anyBoolean;
+import static org.easymock.EasyMock.anyObject;
+import static org.easymock.EasyMock.capture;
+import static org.easymock.EasyMock.createMock;
+import static org.easymock.EasyMock.createNiceMock;
+import static org.easymock.EasyMock.createStrictMock;
+import static org.easymock.EasyMock.eq;
+import static org.easymock.EasyMock.expect;
+import static org.easymock.EasyMock.replay;
+import static org.easymock.EasyMock.reset;
+import static org.easymock.EasyMock.verify;
+import static org.junit.Assert.assertEquals;
+
 
 /**
  * ClusterResourceProvider tests.
@@ -80,41 +84,108 @@ public class ClusterResourceProviderTest {
   private static final Request request = createNiceMock(Request.class);
   private static final TopologyManager topologyManager = createStrictMock(TopologyManager.class);
   private static final TopologyRequestFactory topologyFactory = createStrictMock(TopologyRequestFactory.class);
+  private final static SecurityConfigurationFactory securityFactory = createMock(SecurityConfigurationFactory.class);
   private static final ProvisionClusterRequest topologyRequest = createNiceMock(ProvisionClusterRequest.class);
   private static final BlueprintFactory blueprintFactory = createStrictMock(BlueprintFactory.class);
   private static final Blueprint blueprint = createNiceMock(Blueprint.class);
   private static final RequestStatusResponse requestStatusResponse = createNiceMock(RequestStatusResponse.class);
+  private static final Gson gson = new Gson();
 
   @Before
   public void setup() throws Exception{
-    ClusterResourceProvider.init(topologyManager, topologyFactory);
+    ClusterResourceProvider.init(topologyManager, topologyFactory, securityFactory, gson);
     ProvisionClusterRequest.init(blueprintFactory);
     provider = new ClusterResourceProvider(controller);
 
     expect(blueprintFactory.getBlueprint(BLUEPRINT_NAME)).andReturn(blueprint).anyTimes();
+    expect(securityFactory.createSecurityConfigurationFromRequest(null, false)).andReturn(null).anyTimes();
   }
 
   @After
   public void tearDown() {
-    reset(request, topologyManager, topologyFactory, topologyRequest, blueprintFactory, requestStatusResponse, blueprint);
+    reset(request, topologyManager, topologyFactory, topologyRequest, blueprintFactory, securityFactory,
+      requestStatusResponse, blueprint);
   }
 
   private void replayAll() {
-    replay(request, topologyManager, topologyFactory, topologyRequest, blueprintFactory, requestStatusResponse, blueprint);
+    replay(request, topologyManager, topologyFactory, topologyRequest, blueprintFactory, securityFactory,
+      requestStatusResponse, blueprint);
   }
 
   private void verifyAll() {
-    verify(request, topologyManager, topologyFactory, topologyRequest, blueprintFactory, requestStatusResponse, blueprint);
+    verify(request, topologyManager, topologyFactory, topologyRequest, blueprintFactory, securityFactory,
+      requestStatusResponse, blueprint);
   }
 
   @Test
   public void testCreateResource_blueprint() throws Exception {
     Set<Map<String, Object>> requestProperties = createBlueprintRequestProperties(CLUSTER_NAME, BLUEPRINT_NAME);
     Map<String, Object> properties = requestProperties.iterator().next();
+    Map<String, String> requestInfoProperties = new HashMap<String, String>();
+    requestInfoProperties.put(Request.REQUEST_INFO_BODY_PROPERTY, "{}");
 
     // set expectations
     expect(request.getProperties()).andReturn(requestProperties).anyTimes();
-    expect(topologyFactory.createProvisionClusterRequest(properties)).andReturn(topologyRequest).once();
+    expect(request.getRequestInfoProperties()).andReturn(requestInfoProperties).anyTimes();
+
+    expect(securityFactory.createSecurityConfigurationFromRequest(anyObject(HashMap.class), anyBoolean())).andReturn(null)
+      .once();
+    expect(topologyFactory.createProvisionClusterRequest(properties, null)).andReturn(topologyRequest).once();
+    expect(topologyManager.provisionCluster(topologyRequest)).andReturn(requestStatusResponse).once();
+    expect(requestStatusResponse.getRequestId()).andReturn(5150L).anyTimes();
+
+    replayAll();
+    RequestStatus requestStatus = provider.createResources(request);
+    assertEquals(5150L, requestStatus.getRequestResource().getPropertyValue(PropertyHelper.getPropertyId("Requests", "id")));
+    assertEquals(Resource.Type.Request, requestStatus.getRequestResource().getType());
+    assertEquals("Accepted", requestStatus.getRequestResource().getPropertyValue(PropertyHelper.getPropertyId("Requests", "status")));
+
+    verifyAll();
+  }
+
+  @Test(expected = IllegalArgumentException.class)
+  public void testCreateResource_blueprint_withInvalidSecurityConfiguration() throws Exception {
+    Set<Map<String, Object>> requestProperties = createBlueprintRequestProperties(CLUSTER_NAME, BLUEPRINT_NAME);
+    Map<String, Object> properties = requestProperties.iterator().next();
+    Map<String, String> requestInfoProperties = new HashMap<String, String>();
+    requestInfoProperties.put(Request.REQUEST_INFO_BODY_PROPERTY, "{\"security\" : {\n\"type\" : \"NONE\"," +
+      "\n\"kerberos_descriptor_reference\" : " + "\"testRef\"\n}}");
+    SecurityConfiguration blueprintSecurityConfiguration = new SecurityConfiguration(SecurityType.KERBEROS, "testRef",
+      null);
+    SecurityConfiguration securityConfiguration = new SecurityConfiguration(SecurityType.NONE, null, null);
+
+    // set expectations
+    expect(request.getProperties()).andReturn(requestProperties).anyTimes();
+    expect(request.getRequestInfoProperties()).andReturn(requestInfoProperties).anyTimes();
+
+    expect(securityFactory.createSecurityConfigurationFromRequest(anyObject(HashMap.class), anyBoolean())).andReturn
+      (securityConfiguration).once();
+    expect(topologyFactory.createProvisionClusterRequest(properties, securityConfiguration)).andReturn(topologyRequest).once();
+    expect(topologyRequest.getBlueprint()).andReturn(blueprint).anyTimes();
+    expect(blueprint.getSecurity()).andReturn(blueprintSecurityConfiguration).anyTimes();
+    expect(requestStatusResponse.getRequestId()).andReturn(5150L).anyTimes();
+
+    replayAll();
+    RequestStatus requestStatus = provider.createResources(request);
+  }
+
+  @Test
+  public void testCreateResource_blueprint_withSecurityConfiguration() throws Exception {
+    Set<Map<String, Object>> requestProperties = createBlueprintRequestProperties(CLUSTER_NAME, BLUEPRINT_NAME);
+    Map<String, Object> properties = requestProperties.iterator().next();
+    SecurityConfiguration securityConfiguration = new SecurityConfiguration(SecurityType.KERBEROS, "testRef", null);
+
+    Map<String, String> requestInfoProperties = new HashMap<String, String>();
+    requestInfoProperties.put(Request.REQUEST_INFO_BODY_PROPERTY, "{\"security\" : {\n\"type\" : \"KERBEROS\",\n\"kerberos_descriptor_reference\" : " +
+      "\"testRef\"\n}}");
+
+        // set expectations
+    expect(request.getProperties()).andReturn(requestProperties).anyTimes();
+    expect(request.getRequestInfoProperties()).andReturn(requestInfoProperties).anyTimes();
+
+    expect(topologyFactory.createProvisionClusterRequest(properties, securityConfiguration)).andReturn(topologyRequest).once();
+    expect(securityFactory.createSecurityConfigurationFromRequest(anyObject(HashMap.class), anyBoolean())).andReturn
+      (securityConfiguration).once();
     expect(topologyManager.provisionCluster(topologyRequest)).andReturn(requestStatusResponse).once();
     expect(requestStatusResponse.getRequestId()).andReturn(5150L).anyTimes();
 
@@ -135,7 +206,8 @@ public class ClusterResourceProviderTest {
     // set expectations
     expect(request.getProperties()).andReturn(requestProperties).anyTimes();
     // throw exception from topology request factory an assert that the correct exception is thrown from resource provider
-    expect(topologyFactory.createProvisionClusterRequest(properties)).andThrow(new InvalidTopologyException("test"));
+    expect(topologyFactory.createProvisionClusterRequest(properties, null)).andThrow(new InvalidTopologyException
+      ("test"));
 
     replayAll();
     provider.createResources(request);

http://git-wip-us.apache.org/repos/asf/ambari/blob/6e67b5e5/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ProvisionClusterRequestTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ProvisionClusterRequestTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ProvisionClusterRequestTest.java
index 21aafc5..5ace2a5 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ProvisionClusterRequestTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ProvisionClusterRequestTest.java
@@ -36,9 +36,11 @@ import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.HashMap;
+import java.util.HashSet;
 import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.Set;
 
 import static org.easymock.EasyMock.createMock;
 import static org.easymock.EasyMock.createNiceMock;
@@ -97,9 +99,9 @@ public class ProvisionClusterRequestTest {
     // reset host resource provider expectations to none since we are not specifying a host predicate
     reset(hostResourceProvider);
     replay(hostResourceProvider);
-
     Map<String, Object> properties = createBlueprintRequestPropertiesNameOnly(CLUSTER_NAME, BLUEPRINT_NAME);
-    ProvisionClusterRequest provisionClusterRequest = new ProvisionClusterRequest(properties);
+
+    ProvisionClusterRequest provisionClusterRequest = new ProvisionClusterRequest(properties, null);
 
     assertEquals(CLUSTER_NAME, provisionClusterRequest.getClusterName());
     assertEquals(TopologyRequest.Type.PROVISION, provisionClusterRequest.getType());
@@ -149,9 +151,9 @@ public class ProvisionClusterRequestTest {
     // reset host resource provider expectations to none since we are not specifying a host predicate
     reset(hostResourceProvider);
     replay(hostResourceProvider);
-
     Map<String, Object> properties = createBlueprintRequestPropertiesCountOnly(CLUSTER_NAME, BLUEPRINT_NAME);
-    ProvisionClusterRequest provisionClusterRequest = new ProvisionClusterRequest(properties);
+
+    ProvisionClusterRequest provisionClusterRequest = new ProvisionClusterRequest(properties, null);
 
     assertEquals(CLUSTER_NAME, provisionClusterRequest.getClusterName());
     assertEquals(TopologyRequest.Type.PROVISION, provisionClusterRequest.getType());
@@ -203,7 +205,7 @@ public class ProvisionClusterRequestTest {
   @Test
   public void testMultipleGroups() throws Exception {
     Map<String, Object> properties = createBlueprintRequestProperties(CLUSTER_NAME, BLUEPRINT_NAME);
-    ProvisionClusterRequest provisionClusterRequest = new ProvisionClusterRequest(properties);
+    ProvisionClusterRequest provisionClusterRequest = new ProvisionClusterRequest(properties, null);
 
     assertEquals(CLUSTER_NAME, provisionClusterRequest.getClusterName());
     assertEquals(TopologyRequest.Type.PROVISION, provisionClusterRequest.getType());
@@ -279,7 +281,43 @@ public class ProvisionClusterRequestTest {
     reset(hostResourceProvider);
     replay(hostResourceProvider);
     // should result in an exception
-    new ProvisionClusterRequest(properties);
+    new ProvisionClusterRequest(properties, null);
+  }
+
+  @Test
+  public void test_Creditentials() throws Exception {
+    Map<String, Object> properties = createBlueprintRequestProperties(CLUSTER_NAME, BLUEPRINT_NAME);
+    HashMap<String, String> credentialHashMap = new HashMap<>();
+    credentialHashMap.put("alias", "testAlias");
+    credentialHashMap.put("principal", "testPrincipal");
+    credentialHashMap.put("key", "testKey");
+    credentialHashMap.put("type", "temporary");
+    Set<Map<String, String>> credentialsSet = new HashSet<>();
+    credentialsSet.add(credentialHashMap);
+    properties.put("credentials", credentialsSet);
+
+    ProvisionClusterRequest provisionClusterRequest = new ProvisionClusterRequest(properties, null);
+
+    assertEquals(provisionClusterRequest.getCredentialsMap().get("testAlias").getAlias(), "testAlias");
+    assertEquals(provisionClusterRequest.getCredentialsMap().get("testAlias").getPrincipal(), "testPrincipal");
+    assertEquals(provisionClusterRequest.getCredentialsMap().get("testAlias").getKey(), "testKey");
+    assertEquals(provisionClusterRequest.getCredentialsMap().get("testAlias").getType().name(), "TEMPORARY");
+  }
+
+
+  @Test(expected=InvalidTopologyTemplateException.class)
+  public void test_CreditentialsInvalidType() throws Exception {
+    Map<String, Object> properties = createBlueprintRequestProperties(CLUSTER_NAME, BLUEPRINT_NAME);
+    HashMap<String, String> credentialHashMap = new HashMap<>();
+    credentialHashMap.put("alias", "testAlias");
+    credentialHashMap.put("principal", "testPrincipal");
+    credentialHashMap.put("key", "testKey");
+    credentialHashMap.put("type", "testType");
+    Set<Map<String, String>> credentialsSet = new HashSet<>();
+    credentialsSet.add(credentialHashMap);
+    properties.put("credentials", credentialsSet);
+
+    ProvisionClusterRequest provisionClusterRequest = new ProvisionClusterRequest(properties, null);
   }
 
   @Test(expected= InvalidTopologyTemplateException.class)
@@ -291,7 +329,7 @@ public class ProvisionClusterRequestTest {
     reset(hostResourceProvider);
     replay(hostResourceProvider);
     // should result in an exception
-    new ProvisionClusterRequest(properties);
+    new ProvisionClusterRequest(properties, null);
   }
 
   @Test(expected= InvalidTopologyTemplateException.class)
@@ -303,7 +341,7 @@ public class ProvisionClusterRequestTest {
     reset(hostResourceProvider);
     replay(hostResourceProvider);
     // should result in an exception
-    new ProvisionClusterRequest(properties);
+    new ProvisionClusterRequest(properties, null);
   }
 
   @Test(expected = InvalidTopologyTemplateException.class)
@@ -323,14 +361,14 @@ public class ProvisionClusterRequestTest {
     reset(hostResourceProvider);
     replay(hostResourceProvider);
     // should result in an exception
-    new ProvisionClusterRequest(properties);
+    new ProvisionClusterRequest(properties, null);
   }
 
   @Test
   public void testGetValidators_noDefaultPassword() throws Exception {
     Map<String, Object> properties = createBlueprintRequestProperties(CLUSTER_NAME, BLUEPRINT_NAME);
     //properties.put("default_password", "pwd");
-    TopologyRequest request = new ProvisionClusterRequest(properties);
+    TopologyRequest request = new ProvisionClusterRequest(properties, null);
     List<TopologyValidator> validators = request.getTopologyValidators();
 
     assertEquals(1, validators.size());
@@ -344,7 +382,7 @@ public class ProvisionClusterRequestTest {
   public void testGetValidators_defaultPassword() throws Exception {
     Map<String, Object> properties = createBlueprintRequestProperties(CLUSTER_NAME, BLUEPRINT_NAME);
     properties.put("default_password", "pwd");
-    TopologyRequest request = new ProvisionClusterRequest(properties);
+    TopologyRequest request = new ProvisionClusterRequest(properties, null);
     List<TopologyValidator> validators = request.getTopologyValidators();
 
     assertEquals(1, validators.size());
@@ -359,11 +397,11 @@ public class ProvisionClusterRequestTest {
     reset(hostResourceProvider);
     // checkPropertyIds() returns invalid property names
     expect(hostResourceProvider.checkPropertyIds(Collections.singleton("Hosts/host_name"))).
-        andReturn(Collections.singleton("Hosts/host_name"));
+      andReturn(Collections.singleton("Hosts/host_name"));
     replay(hostResourceProvider);
 
     // should result in an exception due to invalid property in host predicate
-    new ProvisionClusterRequest(createBlueprintRequestProperties(CLUSTER_NAME, BLUEPRINT_NAME));
+    new ProvisionClusterRequest(createBlueprintRequestProperties(CLUSTER_NAME, BLUEPRINT_NAME), null);
   }
 
   @Test(expected = InvalidTopologyTemplateException.class)
@@ -375,7 +413,7 @@ public class ProvisionClusterRequestTest {
     Map<String, Object> properties = createBlueprintRequestPropertiesNameOnly(CLUSTER_NAME, BLUEPRINT_NAME);
     ((Map) ((List) properties.get("host_groups")).iterator().next()).put("host_count", "5");
     // should result in an exception due to both host name and host count being specified
-    new ProvisionClusterRequest(properties);
+    new ProvisionClusterRequest(properties, null);
   }
 
   @Test(expected = InvalidTopologyTemplateException.class)
@@ -387,7 +425,7 @@ public class ProvisionClusterRequestTest {
     Map<String, Object> properties = createBlueprintRequestPropertiesNameOnly(CLUSTER_NAME, BLUEPRINT_NAME);
     ((Map) ((List) properties.get("host_groups")).iterator().next()).put("host_predicate", "Hosts/host_name=myTestHost");
     // should result in an exception due to both host name and host count being specified
-    new ProvisionClusterRequest(properties);
+    new ProvisionClusterRequest(properties, null);
   }
 
   public static Map<String, Object> createBlueprintRequestProperties(String clusterName, String blueprintName) {

http://git-wip-us.apache.org/repos/asf/ambari/blob/6e67b5e5/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/WidgetResourceProviderTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/WidgetResourceProviderTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/WidgetResourceProviderTest.java
index dff0a62..4c8892e 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/WidgetResourceProviderTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/WidgetResourceProviderTest.java
@@ -22,16 +22,17 @@ import com.google.inject.Guice;
 import com.google.inject.Injector;
 import com.google.inject.Module;
 import com.google.inject.util.Modules;
-import org.apache.ambari.server.controller.spi.RequestStatus;
 import org.apache.ambari.server.controller.AmbariManagementController;
 import org.apache.ambari.server.controller.spi.Predicate;
 import org.apache.ambari.server.controller.spi.Request;
+import org.apache.ambari.server.controller.spi.RequestStatus;
 import org.apache.ambari.server.controller.spi.Resource;
 import org.apache.ambari.server.controller.utilities.PredicateBuilder;
 import org.apache.ambari.server.controller.utilities.PropertyHelper;
 import org.apache.ambari.server.orm.InMemoryDefaultTestModule;
 import org.apache.ambari.server.orm.dao.WidgetDAO;
 import org.apache.ambari.server.orm.entities.WidgetEntity;
+import org.apache.ambari.server.security.encryption.CredentialStoreService;
 import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.Clusters;
 import org.easymock.Capture;
@@ -510,6 +511,9 @@ public class WidgetResourceProviderTest {
           EasyMock.createNiceMock(Clusters.class));
       binder.bind(Cluster.class).toInstance(
               EasyMock.createNiceMock(Cluster.class));
+      binder.bind(CredentialStoreService.class).toInstance(
+        EasyMock.createNiceMock(CredentialStoreService.class)
+      );
     }
   }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/6e67b5e5/ambari-server/src/test/java/org/apache/ambari/server/state/UpgradeHelperTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/UpgradeHelperTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/UpgradeHelperTest.java
index 45e7da1..0237ae7 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/UpgradeHelperTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/UpgradeHelperTest.java
@@ -17,26 +17,14 @@
  */
 package org.apache.ambari.server.state;
 
-import static org.easymock.EasyMock.expect;
-import static org.easymock.EasyMock.replay;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertTrue;
-
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
-
+import com.google.gson.Gson;
 import com.google.gson.reflect.TypeToken;
+import com.google.inject.Binder;
+import com.google.inject.Guice;
+import com.google.inject.Injector;
+import com.google.inject.Module;
+import com.google.inject.persist.PersistService;
+import com.google.inject.util.Modules;
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.api.services.AmbariMetaInfo;
 import org.apache.ambari.server.controller.AmbariManagementController;
@@ -45,30 +33,43 @@ import org.apache.ambari.server.controller.ConfigurationRequest;
 import org.apache.ambari.server.orm.GuiceJpaInitializer;
 import org.apache.ambari.server.orm.InMemoryDefaultTestModule;
 import org.apache.ambari.server.orm.OrmTestHelper;
-import org.apache.ambari.server.orm.dao.RepositoryVersionDAO;
-import org.apache.ambari.server.orm.dao.StackDAO;
-import org.apache.ambari.server.orm.entities.StackEntity;
 import org.apache.ambari.server.stack.HostsType;
 import org.apache.ambari.server.stack.MasterHostResolver;
 import org.apache.ambari.server.state.UpgradeHelper.UpgradeGroupHolder;
 import org.apache.ambari.server.state.stack.ConfigUpgradePack;
 import org.apache.ambari.server.state.stack.UpgradePack;
-import org.apache.ambari.server.state.stack.upgrade.*;
+import org.apache.ambari.server.state.stack.upgrade.ConfigUpgradeChangeDefinition;
+import org.apache.ambari.server.state.stack.upgrade.ConfigureTask;
+import org.apache.ambari.server.state.stack.upgrade.Direction;
+import org.apache.ambari.server.state.stack.upgrade.ManualTask;
+import org.apache.ambari.server.state.stack.upgrade.StageWrapper;
+import org.apache.ambari.server.state.stack.upgrade.Task;
+import org.apache.ambari.server.state.stack.upgrade.TaskWrapper;
+import org.apache.ambari.server.state.stack.upgrade.UpgradeType;
 import org.easymock.EasyMock;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Ignore;
 import org.junit.Test;
 
-import com.google.gson.Gson;
-import com.google.gson.reflect.TypeToken;
-import com.google.inject.Binder;
-import com.google.inject.Guice;
-import com.google.inject.Injector;
-import com.google.inject.Module;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
 
-import com.google.inject.persist.PersistService;
-import com.google.inject.util.Modules;
+import static org.easymock.EasyMock.expect;
+import static org.easymock.EasyMock.replay;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
 
 /**
  * Tests the {@link UpgradeHelper} class

http://git-wip-us.apache.org/repos/asf/ambari/blob/6e67b5e5/ambari-server/src/test/java/org/apache/ambari/server/state/cluster/ClustersTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/cluster/ClustersTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/cluster/ClustersTest.java
index a1e2b48..2d9d779 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/cluster/ClustersTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/cluster/ClustersTest.java
@@ -18,18 +18,11 @@
 
 package org.apache.ambari.server.state.cluster;
 
-import static org.junit.Assert.fail;
-
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
-import javax.persistence.EntityManager;
-
+import com.google.inject.Guice;
+import com.google.inject.Inject;
+import com.google.inject.Injector;
+import com.google.inject.persist.PersistService;
+import junit.framework.Assert;
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.ClusterNotFoundException;
 import org.apache.ambari.server.DuplicateResourceException;
@@ -52,6 +45,7 @@ import org.apache.ambari.server.state.Config;
 import org.apache.ambari.server.state.ConfigFactory;
 import org.apache.ambari.server.state.Host;
 import org.apache.ambari.server.state.RepositoryVersionState;
+import org.apache.ambari.server.state.SecurityType;
 import org.apache.ambari.server.state.Service;
 import org.apache.ambari.server.state.ServiceComponent;
 import org.apache.ambari.server.state.ServiceComponentHost;
@@ -61,12 +55,16 @@ import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
 
-import com.google.inject.Guice;
-import com.google.inject.Inject;
-import com.google.inject.Injector;
-import com.google.inject.persist.PersistService;
+import javax.persistence.EntityManager;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
 
-import junit.framework.Assert;
+import static org.junit.Assert.fail;
 
 public class ClustersTest {
 
@@ -165,6 +163,19 @@ public class ClustersTest {
 
   }
 
+  @Test
+  public void testAddAndGetClusterWithSecurityType() throws AmbariException {
+    StackId stackId = new StackId("HDP-2.1.1");
+
+    String c1 = "foo";
+    SecurityType securityType = SecurityType.KERBEROS;
+    clusters.addCluster(c1, stackId, securityType);
+
+    Assert.assertNotNull(clusters.getCluster(c1));
+
+    Assert.assertEquals(c1, clusters.getCluster(c1).getClusterName());
+    Assert.assertEquals(securityType, clusters.getCluster(c1).getSecurityType());
+  }
 
   @Test
   public void testAddAndGetHost() throws AmbariException {

http://git-wip-us.apache.org/repos/asf/ambari/blob/6e67b5e5/ambari-server/src/test/java/org/apache/ambari/server/topology/AmbariContextTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/topology/AmbariContextTest.java b/ambari-server/src/test/java/org/apache/ambari/server/topology/AmbariContextTest.java
index 1038b60..254d3a3 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/topology/AmbariContextTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/topology/AmbariContextTest.java
@@ -18,16 +18,6 @@
 
 package org.apache.ambari.server.topology;
 
-import java.lang.reflect.Field;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Map;
-import java.util.Set;
-
 import org.apache.ambari.server.controller.AmbariManagementController;
 import org.apache.ambari.server.controller.ClusterRequest;
 import org.apache.ambari.server.controller.ConfigGroupRequest;
@@ -54,11 +44,19 @@ import org.apache.ambari.server.state.Service;
 import org.apache.ambari.server.state.StackId;
 import org.apache.ambari.server.state.configgroup.ConfigGroup;
 import org.easymock.Capture;
-import org.easymock.EasyMockSupport;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
 
+import java.lang.reflect.Field;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
+
 import static org.easymock.EasyMock.capture;
 import static org.easymock.EasyMock.createMock;
 import static org.easymock.EasyMock.createNiceMock;
@@ -257,7 +255,7 @@ public class AmbariContextTest {
     replayAll();
 
     // test
-    context.createAmbariResources(topology, CLUSTER_NAME);
+    context.createAmbariResources(topology, CLUSTER_NAME, null);
 
     // assertions
     ClusterRequest clusterRequest = clusterRequestCapture.getValue();

http://git-wip-us.apache.org/repos/asf/ambari/blob/6e67b5e5/ambari-server/src/test/java/org/apache/ambari/server/topology/BlueprintFactoryTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/topology/BlueprintFactoryTest.java b/ambari-server/src/test/java/org/apache/ambari/server/topology/BlueprintFactoryTest.java
index 31b8f5c..3a3b6dc 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/topology/BlueprintFactoryTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/topology/BlueprintFactoryTest.java
@@ -120,7 +120,7 @@ public class BlueprintFactoryTest {
     Map<String, Object> props = BlueprintResourceProviderTest.getBlueprintTestProperties().iterator().next();
 
     replay(stack, dao, entity, configEntity);
-    Blueprint blueprint = testFactory.createBlueprint(props);
+    Blueprint blueprint = testFactory.createBlueprint(props, null);
 
     assertEquals(BLUEPRINT_NAME, blueprint.getName());
     assertSame(stack, blueprint.getStack());
@@ -187,7 +187,7 @@ public class BlueprintFactoryTest {
     props.remove(BlueprintResourceProvider.BLUEPRINT_NAME_PROPERTY_ID);
 
     replay(stack, dao, entity, configEntity);
-    testFactory.createBlueprint(props);
+    testFactory.createBlueprint(props, null);
   }
 
   @Test(expected=IllegalArgumentException.class)
@@ -197,7 +197,7 @@ public class BlueprintFactoryTest {
     ((Set<Map<String, Object>>) props.get(BlueprintResourceProvider.HOST_GROUP_PROPERTY_ID)).clear();
 
     replay(stack, dao, entity, configEntity);
-    testFactory.createBlueprint(props);
+    testFactory.createBlueprint(props, null);
   }
 
   @Test(expected=IllegalArgumentException.class)
@@ -207,7 +207,7 @@ public class BlueprintFactoryTest {
     ((Set<Map<String, Object>>) props.get(BlueprintResourceProvider.HOST_GROUP_PROPERTY_ID)).iterator().next().remove("name");
 
     replay(stack, dao, entity, configEntity);
-    testFactory.createBlueprint(props);
+    testFactory.createBlueprint(props, null);
   }
 
   @Test(expected=IllegalArgumentException.class)
@@ -218,7 +218,7 @@ public class BlueprintFactoryTest {
         iterator().next().remove(BlueprintResourceProvider.COMPONENT_PROPERTY_ID);
 
     replay(stack, dao, entity, configEntity);
-    testFactory.createBlueprint(props);
+    testFactory.createBlueprint(props, null);
   }
 
   @Test(expected=IllegalArgumentException.class)
@@ -229,7 +229,7 @@ public class BlueprintFactoryTest {
         iterator().next().get(BlueprintResourceProvider.COMPONENT_PROPERTY_ID)).iterator().next().put("name", "INVALID_COMPONENT");
 
     replay(stack, dao, entity, configEntity);
-    testFactory.createBlueprint(props);
+    testFactory.createBlueprint(props, null);
   }
 
   private class TestBlueprintFactory extends BlueprintFactory {

http://git-wip-us.apache.org/repos/asf/ambari/blob/6e67b5e5/ambari-server/src/test/java/org/apache/ambari/server/topology/BlueprintImplTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/topology/BlueprintImplTest.java b/ambari-server/src/test/java/org/apache/ambari/server/topology/BlueprintImplTest.java
index 9d4163a..2199247 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/topology/BlueprintImplTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/topology/BlueprintImplTest.java
@@ -19,6 +19,8 @@
 package org.apache.ambari.server.topology;
 
 import org.apache.ambari.server.controller.internal.Stack;
+import org.apache.ambari.server.orm.entities.BlueprintEntity;
+import org.apache.ambari.server.state.SecurityType;
 import org.junit.Test;
 
 import java.util.Collection;
@@ -31,6 +33,7 @@ import static org.easymock.EasyMock.createNiceMock;
 import static org.easymock.EasyMock.expect;
 import static org.easymock.EasyMock.replay;
 import static org.easymock.EasyMock.verify;
+import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
 
 /**
@@ -119,10 +122,14 @@ public class BlueprintImplTest {
     // for this basic test not ensuring that stack properties are ignored, this is tested in another test
     Configuration configuration = new Configuration(properties, attributes, EMPTY_CONFIGURATION);
 
-    Blueprint blueprint = new BlueprintImpl("test", hostGroups, stack, configuration);
+    SecurityConfiguration securityConfiguration = new SecurityConfiguration(SecurityType.KERBEROS, "testRef", null);
+    Blueprint blueprint = new BlueprintImpl("test", hostGroups, stack, configuration, securityConfiguration);
     blueprint.validateRequiredProperties();
+    BlueprintEntity entity = blueprint.toEntity();
 
     verify(stack, group1, group2);
+    assertTrue(entity.getSecurityType() == SecurityType.KERBEROS);
+    assertTrue(entity.getSecurityDescriptorReference().equals("testRef"));
   }
 
   @Test
@@ -192,7 +199,7 @@ public class BlueprintImplTest {
     // for this basic test not ensuring that stack properties are ignored, this is tested in another test
     Configuration configuration = new Configuration(properties, attributes, EMPTY_CONFIGURATION);
 
-    Blueprint blueprint = new BlueprintImpl("test", hostGroups, stack, configuration);
+    Blueprint blueprint = new BlueprintImpl("test", hostGroups, stack, configuration, null);
     try {
       blueprint.validateRequiredProperties();
       fail("Expected exception to be thrown for missing config property");
@@ -276,10 +283,13 @@ public class BlueprintImplTest {
 
     replay(stack, group1, group2);
 
-    Blueprint blueprint = new BlueprintImpl("test", hostGroups, stack, configuration);
+    Blueprint blueprint = new BlueprintImpl("test", hostGroups, stack, configuration, null);
     blueprint.validateRequiredProperties();
+    BlueprintEntity entity = blueprint.toEntity();
 
     verify(stack, group1, group2);
+    assertTrue(entity.getSecurityType() == SecurityType.NONE);
+    assertTrue(entity.getSecurityDescriptorReference() == null);
   }
 
   //todo: ensure coverage for these existing tests


[3/3] ambari git commit: AMBARI-13431. Blueprints Configuration to select Kerberos. (Sandor Magyari and Laszlo Puskas via rnettleton)

Posted by rn...@apache.org.
AMBARI-13431. Blueprints Configuration to select Kerberos. (Sandor Magyari and Laszlo Puskas via rnettleton)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/6e67b5e5
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/6e67b5e5
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/6e67b5e5

Branch: refs/heads/branch-2.1
Commit: 6e67b5e547b48787ff1596a180d5b647e60b366e
Parents: 614893c
Author: Bob Nettleton <rn...@hortonworks.com>
Authored: Thu Oct 29 11:15:55 2015 -0400
Committer: Bob Nettleton <rn...@hortonworks.com>
Committed: Thu Oct 29 11:19:02 2015 -0400

----------------------------------------------------------------------
 .../AmbariManagementControllerImpl.java         |  25 +-
 .../ambari/server/controller/AmbariServer.java  | 103 +++++----
 .../server/controller/ControllerModule.java     |   4 +-
 .../controller/internal/BaseClusterRequest.java |  21 +-
 .../BlueprintConfigurationProcessor.java        |   5 +
 .../internal/BlueprintResourceProvider.java     |  60 +++--
 .../internal/ClusterResourceProvider.java       |  74 ++++--
 .../internal/ExportBlueprintRequest.java        |   2 +-
 .../KerberosDescriptorResourceProvider.java     |   6 +
 .../internal/ProvisionClusterRequest.java       |  53 ++++-
 .../server/orm/entities/BlueprintEntity.java    |  34 ++-
 .../apache/ambari/server/state/Clusters.java    |  18 +-
 .../server/state/cluster/ClustersImpl.java      |  42 ++--
 .../ambari/server/topology/AmbariContext.java   |  26 +--
 .../ambari/server/topology/Blueprint.java       |   2 +
 .../server/topology/BlueprintFactory.java       |   6 +-
 .../ambari/server/topology/BlueprintImpl.java   |  33 ++-
 .../ambari/server/topology/Credential.java      |  71 ++++++
 .../server/topology/SecurityConfiguration.java  |  67 ++++++
 .../topology/SecurityConfigurationFactory.java  | 175 ++++++++++++++
 .../ambari/server/topology/TopologyManager.java | 226 +++++++++++++++----
 .../server/topology/TopologyRequestFactory.java |   3 +-
 .../topology/TopologyRequestFactoryImpl.java    |   5 +-
 .../server/upgrade/UpgradeCatalog213.java       |  18 +-
 .../main/resources/Ambari-DDL-MySQL-CREATE.sql  |   2 +
 .../main/resources/Ambari-DDL-Oracle-CREATE.sql |   2 +
 .../resources/Ambari-DDL-Postgres-CREATE.sql    |   2 +
 .../Ambari-DDL-Postgres-EMBEDDED-CREATE.sql     |   2 +
 .../resources/Ambari-DDL-SQLAnywhere-CREATE.sql |   2 +
 .../resources/Ambari-DDL-SQLServer-CREATE.sql   |   2 +
 .../src/main/resources/properties.json          |   1 +
 .../internal/AlertResourceProviderTest.java     |  54 ++---
 .../internal/BlueprintResourceProviderTest.java | 145 ++++++++----
 .../internal/ClusterResourceProviderTest.java   | 128 ++++++++---
 .../internal/ProvisionClusterRequestTest.java   |  68 ++++--
 .../internal/WidgetResourceProviderTest.java    |   6 +-
 .../ambari/server/state/UpgradeHelperTest.java  |  63 +++---
 .../server/state/cluster/ClustersTest.java      |  45 ++--
 .../server/topology/AmbariContextTest.java      |  22 +-
 .../server/topology/BlueprintFactoryTest.java   |  12 +-
 .../server/topology/BlueprintImplTest.java      |  16 +-
 .../SecurityConfigurationFactoryTest.java       | 163 +++++++++++++
 .../server/topology/TopologyManagerTest.java    |  29 ++-
 .../server/upgrade/UpgradeCatalog213Test.java   |  15 ++
 .../ambari/server/utils/StageUtilsTest.java     |  66 +++---
 45 files changed, 1489 insertions(+), 435 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/6e67b5e5/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
index 615b46d..9b53a6a 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
@@ -60,6 +60,18 @@ import java.util.TreeMap;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.locks.Lock;
 
+
+import com.google.common.cache.Cache;
+import com.google.common.cache.CacheBuilder;
+import com.google.common.collect.ArrayListMultimap;
+import com.google.common.collect.Multimap;
+import com.google.gson.Gson;
+import com.google.gson.reflect.TypeToken;
+import com.google.inject.Inject;
+import com.google.inject.Injector;
+import com.google.inject.Singleton;
+import com.google.inject.persist.Transactional;
+
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.ClusterNotFoundException;
 import org.apache.ambari.server.DuplicateResourceException;
@@ -171,17 +183,6 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.security.core.GrantedAuthority;
 
-import com.google.common.cache.Cache;
-import com.google.common.cache.CacheBuilder;
-import com.google.common.collect.ArrayListMultimap;
-import com.google.common.collect.Multimap;
-import com.google.gson.Gson;
-import com.google.gson.reflect.TypeToken;
-import com.google.inject.Inject;
-import com.google.inject.Injector;
-import com.google.inject.Singleton;
-import com.google.inject.persist.Transactional;
-
 @Singleton
 public class AmbariManagementControllerImpl implements AmbariManagementController {
 
@@ -418,7 +419,7 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle
       throw new HostNotFoundException(invalidHostsStr.toString());
     }
 
-    clusters.addCluster(request.getClusterName(), stackId);
+    clusters.addCluster(request.getClusterName(), stackId, request.getSecurityType());
     Cluster c = clusters.getCluster(request.getClusterName());
 
     if (request.getHostNames() != null) {

http://git-wip-us.apache.org/repos/asf/ambari/blob/6e67b5e5/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariServer.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariServer.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariServer.java
index 021f4c8..1db86e5 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariServer.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariServer.java
@@ -7,7 +7,7 @@
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
  *
- *     http://www.apache.org/licenses/LICENSE-2.0
+ * http://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
@@ -19,19 +19,16 @@
 package org.apache.ambari.server.controller;
 
 
-import java.io.File;
-import java.io.IOException;
-import java.net.Authenticator;
-import java.net.BindException;
-import java.net.PasswordAuthentication;
-import java.net.URL;
-import java.util.EnumSet;
-import java.util.Enumeration;
-import java.util.Map;
-
-import javax.crypto.BadPaddingException;
-import javax.servlet.DispatcherType;
-
+import com.google.common.util.concurrent.ServiceManager;
+import com.google.gson.Gson;
+import com.google.inject.Guice;
+import com.google.inject.Inject;
+import com.google.inject.Injector;
+import com.google.inject.Scopes;
+import com.google.inject.Singleton;
+import com.google.inject.name.Named;
+import com.google.inject.persist.Transactional;
+import com.sun.jersey.spi.container.servlet.ServletContainer;
 import org.apache.ambari.eventdb.webservice.WorkflowJsonService;
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.StateRecoveryManager;
@@ -99,6 +96,7 @@ import org.apache.ambari.server.security.unsecured.rest.ConnectionInfo;
 import org.apache.ambari.server.state.Clusters;
 import org.apache.ambari.server.topology.AmbariContext;
 import org.apache.ambari.server.topology.BlueprintFactory;
+import org.apache.ambari.server.topology.SecurityConfigurationFactory;
 import org.apache.ambari.server.topology.TopologyManager;
 import org.apache.ambari.server.topology.TopologyRequestFactoryImpl;
 import org.apache.ambari.server.utils.StageUtils;
@@ -113,8 +111,8 @@ import org.eclipse.jetty.server.ssl.SslSelectChannelConnector;
 import org.eclipse.jetty.servlet.DefaultServlet;
 import org.eclipse.jetty.servlet.FilterHolder;
 import org.eclipse.jetty.servlet.ServletContextHandler;
-import org.eclipse.jetty.servlets.GzipFilter;
 import org.eclipse.jetty.servlet.ServletHolder;
+import org.eclipse.jetty.servlets.GzipFilter;
 import org.eclipse.jetty.util.ssl.SslContextFactory;
 import org.eclipse.jetty.util.thread.QueuedThreadPool;
 import org.slf4j.Logger;
@@ -128,16 +126,17 @@ import org.springframework.web.context.request.RequestContextListener;
 import org.springframework.web.context.support.GenericWebApplicationContext;
 import org.springframework.web.filter.DelegatingFilterProxy;
 
-import com.google.common.util.concurrent.ServiceManager;
-import com.google.gson.Gson;
-import com.google.inject.Guice;
-import com.google.inject.Inject;
-import com.google.inject.Injector;
-import com.google.inject.Scopes;
-import com.google.inject.Singleton;
-import com.google.inject.name.Named;
-import com.google.inject.persist.Transactional;
-import com.sun.jersey.spi.container.servlet.ServletContainer;
+import javax.crypto.BadPaddingException;
+import javax.servlet.DispatcherType;
+import java.io.File;
+import java.io.IOException;
+import java.net.Authenticator;
+import java.net.BindException;
+import java.net.PasswordAuthentication;
+import java.net.URL;
+import java.util.EnumSet;
+import java.util.Enumeration;
+import java.util.Map;
 
 @Singleton
 public class AmbariServer {
@@ -154,27 +153,27 @@ public class AmbariServer {
   static {
     Velocity.setProperty("runtime.log.logsystem.log4j.logger", VELOCITY_LOG_CATEGORY);
   }
-  
+
   private static final String CLASSPATH_CHECK_CLASS = "org/apache/ambari/server/controller/AmbariServer.class";
   private static final String CLASSPATH_SANITY_CHECK_FAILURE_MESSAGE = "%s class is found in multiple jar files. Possible reasons include multiple ambari server jar files in the ambari classpath.\n" +
-  "Check for additional ambari server jar files and check that /usr/lib/ambari-server/ambari-server*.jar matches only one file.";
-  
+      "Check for additional ambari server jar files and check that /usr/lib/ambari-server/ambari-server*.jar matches only one file.";
+
   static {
     Enumeration<URL> ambariServerClassUrls;
     try {
       ambariServerClassUrls = AmbariServer.class.getClassLoader().getResources(CLASSPATH_CHECK_CLASS);
-      
+
       int ambariServerClassUrlsSize = 0;
-      while(ambariServerClassUrls.hasMoreElements()){
+      while (ambariServerClassUrls.hasMoreElements()) {
         ambariServerClassUrlsSize++;
         URL url = ambariServerClassUrls.nextElement();
         LOG.info(String.format("Found %s class in %s", CLASSPATH_CHECK_CLASS, url.getPath()));
       }
-      if(ambariServerClassUrlsSize>1) {
+      if (ambariServerClassUrlsSize > 1) {
         throw new RuntimeException(String.format(CLASSPATH_SANITY_CHECK_FAILURE_MESSAGE, CLASSPATH_CHECK_CLASS));
       }
     } catch (IOException e) {
-        e.printStackTrace();
+      e.printStackTrace();
     }
   }
 
@@ -332,7 +331,7 @@ public class AmbariServer {
 
       if (configs.getApiAuthentication()) {
         root.addFilter(new FilterHolder(springSecurityFilter), "/api/*", DISPATCHER_TYPES);
-      // root.addFilter(new FilterHolder(springSecurityFilter), "/proxy/*", DISPATCHER_TYPES);
+        // root.addFilter(new FilterHolder(springSecurityFilter), "/proxy/*", DISPATCHER_TYPES);
       }
 
 
@@ -442,7 +441,7 @@ public class AmbariServer {
 
       if (configs.csrfProtectionEnabled()) {
         sh.setInitParameter("com.sun.jersey.spi.container.ContainerRequestFilters",
-                    "org.apache.ambari.server.api.AmbariCsrfProtectionFilter");
+            "org.apache.ambari.server.api.AmbariCsrfProtectionFilter");
         /* proxy.setInitParameter("com.sun.jersey.spi.container.ContainerRequestFilters",
                     "org.apache.ambari.server.api.AmbariCsrfProtectionFilter"); */
       }
@@ -461,7 +460,7 @@ public class AmbariServer {
 
       if (configs.getApiSSLAuthentication()) {
         String httpsKeystore = configsMap.get(Configuration.CLIENT_API_SSL_KSTR_DIR_NAME_KEY) +
-          File.separator + configsMap.get(Configuration.CLIENT_API_SSL_KSTR_NAME_KEY);
+            File.separator + configsMap.get(Configuration.CLIENT_API_SSL_KSTR_NAME_KEY);
         String httpsTruststore = configsMap.get(Configuration.CLIENT_API_SSL_KSTR_DIR_NAME_KEY) +
             File.separator + configsMap.get(Configuration.CLIENT_API_SSL_TSTR_NAME_KEY);
         LOG.info("API SSL Authentication is turned on. Keystore - " + httpsKeystore);
@@ -481,8 +480,7 @@ public class AmbariServer {
         sapiConnector.setTruststoreType(configsMap.get(Configuration.CLIENT_API_SSL_KSTR_TYPE_KEY));
         sapiConnector.setMaxIdleTime(configs.getConnectionMaxIdleTime());
         apiConnector = sapiConnector;
-      }
-      else  {
+      } else {
         apiConnector = new SelectChannelConnector();
         apiConnector.setPort(configs.getClientApiPort());
         apiConnector.setMaxIdleTime(configs.getConnectionMaxIdleTime());
@@ -519,13 +517,13 @@ public class AmbariServer {
 
       LOG.info("********* Initializing Scheduled Request Manager **********");
       ExecutionScheduleManager executionScheduleManager = injector
-        .getInstance(ExecutionScheduleManager.class);
+          .getInstance(ExecutionScheduleManager.class);
 
 
       clusterController = controller;
 
       StateRecoveryManager recoveryManager = injector.getInstance(
-              StateRecoveryManager.class);
+          StateRecoveryManager.class);
       recoveryManager.doWork();
 
       /*
@@ -547,11 +545,11 @@ public class AmbariServer {
 
       server.join();
       LOG.info("Joined the Server");
-    } catch (BadPaddingException bpe){
+    } catch (BadPaddingException bpe) {
       LOG.error("Bad keystore or private key password. " +
-        "HTTPS certificate re-importing may be required.");
+          "HTTPS certificate re-importing may be required.");
       throw bpe;
-    } catch(BindException bindException) {
+    } catch (BindException bindException) {
       LOG.error("Could not bind to server port - instance may already be running. " +
           "Terminating this instance.", bindException);
       throw bindException;
@@ -563,12 +561,12 @@ public class AmbariServer {
    * at server properties)
    */
   private void disableInsecureProtocols(SslContextFactory factory) {
-    if (! configs.getSrvrDisabledCiphers().isEmpty()) {
-      String [] masks = configs.getSrvrDisabledCiphers().split(DISABLED_ENTRIES_SPLITTER);
+    if (!configs.getSrvrDisabledCiphers().isEmpty()) {
+      String[] masks = configs.getSrvrDisabledCiphers().split(DISABLED_ENTRIES_SPLITTER);
       factory.setExcludeCipherSuites(masks);
     }
-    if (! configs.getSrvrDisabledProtocols().isEmpty()) {
-      String [] masks = configs.getSrvrDisabledProtocols().split(DISABLED_ENTRIES_SPLITTER);
+    if (!configs.getSrvrDisabledProtocols().isEmpty()) {
+      String[] masks = configs.getSrvrDisabledProtocols().split(DISABLED_ENTRIES_SPLITTER);
       factory.setExcludeProtocols(masks);
     }
   }
@@ -600,11 +598,11 @@ public class AmbariServer {
       FilterHolder gzipFilter = context.addFilter(GzipFilter.class, "/*",
           EnumSet.of(DispatcherType.REQUEST));
 
-      gzipFilter.setInitParameter("methods","GET,POST,PUT,DELETE");
+      gzipFilter.setInitParameter("methods", "GET,POST,PUT,DELETE");
       gzipFilter.setInitParameter("mimeTypes",
           "text/html,text/plain,text/xml,text/css,application/x-javascript," +
-          "application/xml,application/x-www-form-urlencoded," +
-          "application/javascript,application/json");
+              "application/xml,application/x-www-form-urlencoded," +
+              "application/javascript,application/json");
       gzipFilter.setInitParameter("minGzipSize", configs.getApiGzipMinSize());
     }
   }
@@ -682,15 +680,16 @@ public class AmbariServer {
     StageUtils.setGson(injector.getInstance(Gson.class));
     StageUtils.setTopologyManager(injector.getInstance(TopologyManager.class));
     WorkflowJsonService.setDBProperties(
-      injector.getInstance(Configuration.class));
+        injector.getInstance(Configuration.class));
     SecurityFilter.init(injector.getInstance(Configuration.class));
     StackDefinedPropertyProvider.init(injector);
     AbstractControllerResourceProvider.init(injector.getInstance(ResourceProviderFactory.class));
     BlueprintResourceProvider.init(injector.getInstance(BlueprintFactory.class),
-        injector.getInstance(BlueprintDAO.class), injector.getInstance(Gson.class));
+        injector.getInstance(BlueprintDAO.class), injector.getInstance(SecurityConfigurationFactory.class), injector.getInstance(Gson.class));
     StackDependencyResourceProvider.init(ambariMetaInfo);
     ClusterResourceProvider.init(injector.getInstance(TopologyManager.class),
-        injector.getInstance(TopologyRequestFactoryImpl.class));
+        injector.getInstance(TopologyRequestFactoryImpl.class), injector.getInstance(SecurityConfigurationFactory
+            .class), injector.getInstance(Gson.class));
     HostResourceProvider.setTopologyManager(injector.getInstance(TopologyManager.class));
     BlueprintFactory.init(injector.getInstance(BlueprintDAO.class));
     BaseClusterRequest.init(injector.getInstance(BlueprintFactory.class));

http://git-wip-us.apache.org/repos/asf/ambari/blob/6e67b5e5/ambari-server/src/main/java/org/apache/ambari/server/controller/ControllerModule.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/ControllerModule.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/ControllerModule.java
index 0426625..60217c0 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/ControllerModule.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/ControllerModule.java
@@ -103,6 +103,7 @@ import org.apache.ambari.server.state.scheduler.RequestExecutionImpl;
 import org.apache.ambari.server.state.stack.OsFamily;
 import org.apache.ambari.server.state.svccomphost.ServiceComponentHostImpl;
 import org.apache.ambari.server.topology.BlueprintFactory;
+import org.apache.ambari.server.topology.SecurityConfigurationFactory;
 import org.apache.ambari.server.view.ViewInstanceHandlerList;
 import org.eclipse.jetty.server.SessionIdManager;
 import org.eclipse.jetty.server.SessionManager;
@@ -331,13 +332,14 @@ public class ControllerModule extends AbstractModule {
         to(configuration.getExecutionCommandsCacheSize());
 
     bind(AmbariManagementController.class).to(
-        AmbariManagementControllerImpl.class);
+      AmbariManagementControllerImpl.class);
     bind(AbstractRootServiceResponseFactory.class).to(RootServiceResponseFactory.class);
     bind(ExecutionScheduler.class).to(ExecutionSchedulerImpl.class);
     bind(DBAccessor.class).to(DBAccessorImpl.class);
     bind(ViewInstanceHandlerList.class).to(AmbariHandlerList.class);
     bind(TimelineMetricCacheProvider.class);
     bind(TimelineMetricCacheEntryFactory.class);
+    bind(SecurityConfigurationFactory.class).in(Scopes.SINGLETON);
 
     requestStaticInjection(ExecutionCommandWrapper.class);
     requestStaticInjection(DatabaseChecker.class);

http://git-wip-us.apache.org/repos/asf/ambari/blob/6e67b5e5/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BaseClusterRequest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BaseClusterRequest.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BaseClusterRequest.java
index 22bffe6..a67317a 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BaseClusterRequest.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BaseClusterRequest.java
@@ -18,11 +18,6 @@
 
 package org.apache.ambari.server.controller.internal;
 
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Map;
-import java.util.Set;
-
 import org.apache.ambari.server.api.predicate.InvalidQueryException;
 import org.apache.ambari.server.api.predicate.QueryLexer;
 import org.apache.ambari.server.api.predicate.Token;
@@ -34,8 +29,14 @@ import org.apache.ambari.server.topology.BlueprintFactory;
 import org.apache.ambari.server.topology.Configuration;
 import org.apache.ambari.server.topology.HostGroupInfo;
 import org.apache.ambari.server.topology.InvalidTopologyTemplateException;
+import org.apache.ambari.server.topology.SecurityConfiguration;
 import org.apache.ambari.server.topology.TopologyRequest;
 
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
+
 /**
  * Provides common cluster request functionality.
  */
@@ -62,6 +63,11 @@ public abstract class BaseClusterRequest implements TopologyRequest {
   protected Configuration configuration;
 
   /**
+   * security configuration
+   */
+  protected SecurityConfiguration securityConfiguration;
+
+  /**
    * blueprint factory
    */
   protected static BlueprintFactory blueprintFactory;
@@ -162,6 +168,11 @@ public abstract class BaseClusterRequest implements TopologyRequest {
     return blueprintFactory;
   }
 
+
+  public SecurityConfiguration getSecurityConfiguration() {
+    return securityConfiguration;
+  }
+
   /**
    * Get the host resource provider instance.
    *

http://git-wip-us.apache.org/repos/asf/ambari/blob/6e67b5e5/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BlueprintConfigurationProcessor.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BlueprintConfigurationProcessor.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BlueprintConfigurationProcessor.java
index 223db51..d1d7e64 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BlueprintConfigurationProcessor.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BlueprintConfigurationProcessor.java
@@ -2114,6 +2114,11 @@ public class BlueprintConfigurationProcessor {
       }
 
     }
+
+    if(clusterTopology.isClusterKerberosEnabled()) {
+      configuration.setProperty(CLUSTER_ENV_CONFIG_TYPE_NAME, "security_enabled", "true");
+      configTypesUpdated.add(CLUSTER_ENV_CONFIG_TYPE_NAME);
+    }
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/ambari/blob/6e67b5e5/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BlueprintResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BlueprintResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BlueprintResourceProvider.java
index 6cb6a74..fa355fa 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BlueprintResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BlueprintResourceProvider.java
@@ -20,16 +20,7 @@ package org.apache.ambari.server.controller.internal;
 
 import com.google.common.base.Preconditions;
 import com.google.common.base.Strings;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
+import com.google.gson.Gson;
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.DuplicateResourceException;
 import org.apache.ambari.server.controller.AmbariManagementController;
@@ -51,11 +42,23 @@ import org.apache.ambari.server.orm.entities.HostGroupComponentEntity;
 import org.apache.ambari.server.orm.entities.HostGroupEntity;
 import org.apache.ambari.server.orm.entities.StackEntity;
 import org.apache.ambari.server.stack.NoSuchStackException;
+import org.apache.ambari.server.state.SecurityType;
 import org.apache.ambari.server.topology.Blueprint;
 import org.apache.ambari.server.topology.BlueprintFactory;
 import org.apache.ambari.server.topology.InvalidTopologyException;
+import org.apache.ambari.server.topology.SecurityConfiguration;
+import org.apache.ambari.server.topology.SecurityConfigurationFactory;
 
-import com.google.gson.Gson;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
 
 
 /**
@@ -73,6 +76,11 @@ public class BlueprintResourceProvider extends AbstractControllerResourceProvide
   public static final String STACK_VERSION_PROPERTY_ID =
       PropertyHelper.getPropertyId("Blueprints", "stack_version");
 
+  public static final String BLUEPRINT_SECURITY_PROPERTY_ID =
+    PropertyHelper.getPropertyId("Blueprints", "security");
+
+  public static final String BLUEPRINTS_PROPERTY_ID = "Blueprints";
+
   // Host Groups
   public static final String HOST_GROUP_PROPERTY_ID = "host_groups";
   public static final String HOST_GROUP_NAME_PROPERTY_ID = "name";
@@ -107,6 +115,11 @@ public class BlueprintResourceProvider extends AbstractControllerResourceProvide
   private static BlueprintFactory blueprintFactory;
 
   /**
+   * Used to create SecurityConfiguration instances
+   */
+  private static SecurityConfigurationFactory securityConfigurationFactory;
+
+  /**
    * Blueprint Data Access Object
    */
   private static BlueprintDAO blueprintDAO;
@@ -116,7 +129,6 @@ public class BlueprintResourceProvider extends AbstractControllerResourceProvide
    */
   private static Gson jsonSerializer;
 
-
   // ----- Constructors ----------------------------------------------------
 
   /**
@@ -140,9 +152,11 @@ public class BlueprintResourceProvider extends AbstractControllerResourceProvide
    * @param dao       blueprint data access object
    * @param gson      json serializer
    */
-  public static void init(BlueprintFactory factory, BlueprintDAO dao, Gson gson) {
+  public static void init(BlueprintFactory factory, BlueprintDAO dao, SecurityConfigurationFactory
+    securityFactory, Gson gson) {
     blueprintFactory = factory;
     blueprintDAO = dao;
+    securityConfigurationFactory = securityFactory;
     jsonSerializer = gson;
   }
 
@@ -242,8 +256,8 @@ public class BlueprintResourceProvider extends AbstractControllerResourceProvide
       modifyResources(new Command<Void>() {
         @Override
         public Void invoke() throws AmbariException {
-        blueprintDAO.removeByName(blueprintName);
-        return null;
+          blueprintDAO.removeByName(blueprintName);
+          return null;
         }
       });
     }
@@ -291,7 +305,16 @@ public class BlueprintResourceProvider extends AbstractControllerResourceProvide
     }
     setResourceProperty(resource, HOST_GROUP_PROPERTY_ID, listGroupProps, requestedIds);
     setResourceProperty(resource, CONFIGURATION_PROPERTY_ID,
-        populateConfigurationList(entity.getConfigurations()), requestedIds);
+      populateConfigurationList(entity.getConfigurations()), requestedIds);
+
+    if (entity.getSecurityType() != null) {
+      Map<String, String> securityConfigMap = new LinkedHashMap<>();
+      securityConfigMap.put(SecurityConfigurationFactory.TYPE_PROPERTY_ID, entity.getSecurityType().name());
+      if(entity.getSecurityType() == SecurityType.KERBEROS) {
+        securityConfigMap.put(SecurityConfigurationFactory.KERBEROS_DESCRIPTOR_REFERENCE_PROPERTY_ID, entity.getSecurityDescriptorReference());
+      }
+      setResourceProperty(resource, BLUEPRINT_SECURITY_PROPERTY_ID, securityConfigMap, requestedIds);
+    }
 
     return resource;
   }
@@ -405,9 +428,12 @@ public class BlueprintResourceProvider extends AbstractControllerResourceProvide
             Preconditions.checkArgument(((Map) map).size() <= 1, CONFIGURATION_MAP_SIZE_CHECK_ERROR_MESSAGE);
           }
         }
+        SecurityConfiguration securityConfiguration = securityConfigurationFactory
+          .createSecurityConfigurationFromRequest((Map<String, Object>) rawBodyMap.get(BLUEPRINTS_PROPERTY_ID), true);
+
         Blueprint blueprint;
         try {
-          blueprint = blueprintFactory.createBlueprint(properties);
+          blueprint = blueprintFactory.createBlueprint(properties, securityConfiguration);
         } catch (NoSuchStackException e) {
           throw new IllegalArgumentException("Specified stack doesn't exist: " + e, e);
         }

http://git-wip-us.apache.org/repos/asf/ambari/blob/6e67b5e5/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterResourceProvider.java
index e2f132e..cd28aac 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterResourceProvider.java
@@ -17,6 +17,7 @@
  */
 package org.apache.ambari.server.controller.internal;
 
+import com.google.gson.Gson;
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.controller.AmbariManagementController;
 import org.apache.ambari.server.controller.ClusterRequest;
@@ -25,15 +26,6 @@ import org.apache.ambari.server.controller.ConfigurationRequest;
 import org.apache.ambari.server.controller.RequestStatusResponse;
 import org.apache.ambari.server.controller.ServiceConfigVersionRequest;
 import org.apache.ambari.server.controller.ServiceConfigVersionResponse;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
 import org.apache.ambari.server.controller.spi.NoSuchParentResourceException;
 import org.apache.ambari.server.controller.spi.NoSuchResourceException;
 import org.apache.ambari.server.controller.spi.Predicate;
@@ -45,7 +37,21 @@ import org.apache.ambari.server.controller.spi.SystemException;
 import org.apache.ambari.server.controller.spi.UnsupportedPropertyException;
 import org.apache.ambari.server.controller.utilities.PropertyHelper;
 import org.apache.ambari.server.state.SecurityType;
-import org.apache.ambari.server.topology.*;
+import org.apache.ambari.server.topology.InvalidTopologyException;
+import org.apache.ambari.server.topology.InvalidTopologyTemplateException;
+import org.apache.ambari.server.topology.SecurityConfiguration;
+import org.apache.ambari.server.topology.SecurityConfigurationFactory;
+import org.apache.ambari.server.topology.TopologyManager;
+import org.apache.ambari.server.topology.TopologyRequestFactory;
+
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
 
 
 /**
@@ -56,8 +62,8 @@ public class ClusterResourceProvider extends AbstractControllerResourceProvider
   // ----- Property ID constants ---------------------------------------------
 
   // Clusters
-  public static final String CLUSTER_ID_PROPERTY_ID      = PropertyHelper.getPropertyId("Clusters", "cluster_id");
-  public static final String CLUSTER_NAME_PROPERTY_ID    = PropertyHelper.getPropertyId("Clusters", "cluster_name");
+  public static final String CLUSTER_ID_PROPERTY_ID = PropertyHelper.getPropertyId("Clusters", "cluster_id");
+  public static final String CLUSTER_NAME_PROPERTY_ID = PropertyHelper.getPropertyId("Clusters", "cluster_name");
   public static final String CLUSTER_VERSION_PROPERTY_ID = PropertyHelper.getPropertyId("Clusters", "version");
   public static final String CLUSTER_PROVISIONING_STATE_PROPERTY_ID = PropertyHelper.getPropertyId("Clusters", "provisioning_state");
   public static final String CLUSTER_SECURITY_TYPE_PROPERTY_ID = PropertyHelper.getPropertyId("Clusters", "security_type");
@@ -67,6 +73,8 @@ public class ClusterResourceProvider extends AbstractControllerResourceProvider
   public static final String CLUSTER_HEALTH_REPORT_PROPERTY_ID = PropertyHelper.getPropertyId("Clusters", "health_report");
   public static final String CLUSTER_CREDENTIAL_STORE_PROPERTIES_PROPERTY_ID = PropertyHelper.getPropertyId("Clusters", "credential_store_properties");
   public static final String BLUEPRINT_PROPERTY_ID = PropertyHelper.getPropertyId(null, "blueprint");
+  public static final String SECURITY_PROPERTY_ID = PropertyHelper.getPropertyId(null, "security");
+  public static final String CREDENTIALS_PROPERTY_ID = PropertyHelper.getPropertyId(null, "credentials");
   public static final String SESSION_ATTRIBUTES_PROPERTY_ID = "session_attributes";
 
   /**
@@ -90,6 +98,11 @@ public class ClusterResourceProvider extends AbstractControllerResourceProvider
   private static TopologyRequestFactory topologyRequestFactory;
 
   /**
+   * Used to create SecurityConfiguration instances
+   */
+  private static SecurityConfigurationFactory securityConfigurationFactory;
+
+  /**
    * The cluster primary key properties.
    */
   private static Set<String> pkPropertyIds =
@@ -108,6 +121,11 @@ public class ClusterResourceProvider extends AbstractControllerResourceProvider
    */
   private static Set<String> propertyIds = new HashSet<String>();
 
+  /**
+   * Used to serialize to/from json.
+   */
+  private static Gson jsonSerializer;
+
 
   static {
     propertyIds.add(CLUSTER_ID_PROPERTY_ID);
@@ -122,6 +140,8 @@ public class ClusterResourceProvider extends AbstractControllerResourceProvider
     propertyIds.add(CLUSTER_CREDENTIAL_STORE_PROPERTIES_PROPERTY_ID);
     propertyIds.add(BLUEPRINT_PROPERTY_ID);
     propertyIds.add(SESSION_ATTRIBUTES_PROPERTY_ID);
+    propertyIds.add(SECURITY_PROPERTY_ID);
+    propertyIds.add(CREDENTIALS_PROPERTY_ID);
   }
 
 
@@ -149,7 +169,7 @@ public class ClusterResourceProvider extends AbstractControllerResourceProvider
     RequestStatusResponse createResponse = null;
     for (final Map<String, Object> properties : request.getProperties()) {
       if (isCreateFromBlueprint(properties)) {
-        createResponse = processBlueprintCreate(properties);
+        createResponse = processBlueprintCreate(properties, request.getRequestInfoProperties());
       } else {
         createClusterResource(properties);
       }
@@ -320,6 +340,7 @@ public class ClusterResourceProvider extends AbstractControllerResourceProvider
     baseUnsupported.remove("host_groups");
     baseUnsupported.remove("default_password");
     baseUnsupported.remove("configurations");
+    baseUnsupported.remove("credentials");
 
     return checkConfigPropertyIds(baseUnsupported, "Clusters");
   }
@@ -329,14 +350,17 @@ public class ClusterResourceProvider extends AbstractControllerResourceProvider
 
   /**
    * Inject the blueprint data access object which is used to obtain blueprint entities.
-   *
-   * @param manager         topology manager
+   *  @param manager         topology manager
    * @param requestFactory  request factory
+   * @param instance
    */
   //todo: proper static injection mechanism
-  public static void init(TopologyManager manager, TopologyRequestFactory requestFactory) {
+  public static void init(TopologyManager manager, TopologyRequestFactory requestFactory,
+                          SecurityConfigurationFactory securityFactory, Gson instance) {
     topologyManager = manager;
     topologyRequestFactory = requestFactory;
+    securityConfigurationFactory = securityFactory;
+    jsonSerializer = instance;
   }
 
 
@@ -451,6 +475,7 @@ public class ClusterResourceProvider extends AbstractControllerResourceProvider
    *
    * @param properties  request body properties
    *
+   * @param requestInfoProperties raw request body
    * @return asynchronous response information
    *
    * @throws ResourceAlreadyExistsException if cluster already exists
@@ -459,25 +484,36 @@ public class ClusterResourceProvider extends AbstractControllerResourceProvider
    * @throws NoSuchParentResourceException  if a necessary parent resource doesn't exist
    */
   @SuppressWarnings("unchecked")
-  private RequestStatusResponse processBlueprintCreate(Map<String, Object> properties)
+  private RequestStatusResponse processBlueprintCreate(Map<String, Object> properties, Map<String, String> requestInfoProperties)
       throws ResourceAlreadyExistsException, SystemException, UnsupportedPropertyException,
       NoSuchParentResourceException {
 
     LOG.info("Creating Cluster '" + properties.get(CLUSTER_NAME_PROPERTY_ID) +
         "' based on blueprint '" + String.valueOf(properties.get(BLUEPRINT_PROPERTY_ID)) + "'.");
 
+    String rawRequestBody = requestInfoProperties.get(Request.REQUEST_INFO_BODY_PROPERTY);
+    Map<String, Object> rawBodyMap = jsonSerializer.<Map<String, Object>>fromJson(rawRequestBody, Map.class);
+
+    SecurityConfiguration securityConfiguration = securityConfigurationFactory.createSecurityConfigurationFromRequest
+      (rawBodyMap, false);
     ProvisionClusterRequest createClusterRequest;
     try {
-      createClusterRequest = topologyRequestFactory.createProvisionClusterRequest(properties);
+      createClusterRequest = topologyRequestFactory.createProvisionClusterRequest(properties, securityConfiguration);
     } catch (InvalidTopologyTemplateException e) {
       throw new IllegalArgumentException("Invalid Cluster Creation Template: " + e, e);
     }
 
+    if (securityConfiguration != null && securityConfiguration.getType() == SecurityType.NONE &&
+        createClusterRequest.getBlueprint().getSecurity() != null && createClusterRequest.getBlueprint().getSecurity()
+        .getType() == SecurityType.KERBEROS) {
+      throw new IllegalArgumentException("Setting security to NONE is not allowed as security type in blueprint is set to KERBEROS!");
+    }
+
     try {
       return topologyManager.provisionCluster(createClusterRequest);
     } catch (InvalidTopologyException e) {
       throw new IllegalArgumentException("Topology validation failed: " + e, e);
-    }  catch (AmbariException e) {
+    } catch (AmbariException e) {
       e.printStackTrace();
       throw new SystemException("Unknown exception when asking TopologyManager to provision cluster", e);
     }

http://git-wip-us.apache.org/repos/asf/ambari/blob/6e67b5e5/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ExportBlueprintRequest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ExportBlueprintRequest.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ExportBlueprintRequest.java
index c80ae2c..8c8b89d 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ExportBlueprintRequest.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ExportBlueprintRequest.java
@@ -133,7 +133,7 @@ public class ExportBlueprintRequest implements TopologyRequest {
       hostGroups.add(new HostGroupImpl(exportedHostGroup.getName(), bpName, stack, exportedHostGroup.getComponents(),
           exportedHostGroup.getConfiguration(), String.valueOf(exportedHostGroup.getCardinality())));
     }
-    blueprint = new BlueprintImpl(bpName, hostGroups, stack, configuration);
+    blueprint = new BlueprintImpl(bpName, hostGroups, stack, configuration, null);
   }
 
   private void createHostGroupInfo(Collection<ExportedHostGroup> exportedHostGroups) {

http://git-wip-us.apache.org/repos/asf/ambari/blob/6e67b5e5/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/KerberosDescriptorResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/KerberosDescriptorResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/KerberosDescriptorResourceProvider.java
index cc02119..3cece5d 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/KerberosDescriptorResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/KerberosDescriptorResourceProvider.java
@@ -70,6 +70,12 @@ public class KerberosDescriptorResourceProvider extends AbstractControllerResour
   }
 
   @Override
+  public Set<String> checkPropertyIds(Set<String> propertyIds) {
+    LOGGER.debug("Skipping property id validation for kerberos descriptor resources");
+    return Collections.emptySet();
+  }
+
+  @Override
   public RequestStatus createResources(Request request) throws SystemException, UnsupportedPropertyException,
       ResourceAlreadyExistsException, NoSuchParentResourceException {
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/6e67b5e5/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ProvisionClusterRequest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ProvisionClusterRequest.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ProvisionClusterRequest.java
index 4a906b1..9716abe 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ProvisionClusterRequest.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ProvisionClusterRequest.java
@@ -17,22 +17,29 @@
  */
 package org.apache.ambari.server.controller.internal;
 
+import com.google.common.base.Enums;
+import com.google.common.base.Strings;
 import org.apache.ambari.server.api.predicate.InvalidQueryException;
+import org.apache.ambari.server.security.encryption.CredentialStoreType;
 import org.apache.ambari.server.stack.NoSuchStackException;
 import org.apache.ambari.server.topology.Configuration;
 import org.apache.ambari.server.topology.ConfigurationFactory;
+import org.apache.ambari.server.topology.Credential;
 import org.apache.ambari.server.topology.HostGroupInfo;
 import org.apache.ambari.server.topology.InvalidTopologyTemplateException;
 import org.apache.ambari.server.topology.NoSuchBlueprintException;
 import org.apache.ambari.server.topology.RequiredPasswordValidator;
+import org.apache.ambari.server.topology.SecurityConfiguration;
 import org.apache.ambari.server.topology.TopologyValidator;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import java.util.Collection;
 import java.util.Collections;
+import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.Set;
 
 /**
  * Request for provisioning a cluster.
@@ -94,14 +101,18 @@ public class ProvisionClusterRequest extends BaseClusterRequest {
    */
   private String defaultPassword;
 
+  private Map<String, Credential> credentialsMap;
+
   private final static Logger LOG = LoggerFactory.getLogger(ProvisionClusterRequest.class);
 
   /**
    * Constructor.
    *
    * @param properties  request properties
+   * @param securityConfiguration  security config related properties
    */
-  public ProvisionClusterRequest(Map<String, Object> properties) throws InvalidTopologyTemplateException {
+  public ProvisionClusterRequest(Map<String, Object> properties, SecurityConfiguration securityConfiguration) throws
+    InvalidTopologyTemplateException {
     setClusterName(String.valueOf(properties.get(
         ClusterResourceProvider.CLUSTER_NAME_PROPERTY_ID)));
 
@@ -117,12 +128,52 @@ public class ProvisionClusterRequest extends BaseClusterRequest {
       throw new InvalidTopologyTemplateException("The specified blueprint doesn't exist: " + e, e);
     }
 
+    this.securityConfiguration = securityConfiguration;
+
     Configuration configuration = configurationFactory.getConfiguration(
         (Collection<Map<String, String>>) properties.get(CONFIGURATIONS_PROPERTY));
     configuration.setParentConfiguration(blueprint.getConfiguration());
     setConfiguration(configuration);
 
     parseHostGroupInfo(properties);
+
+    this.credentialsMap = parseCredentials(properties);
+  }
+
+  private Map<String, Credential> parseCredentials(Map<String, Object> properties) throws
+    InvalidTopologyTemplateException {
+    HashMap<String, Credential> credentialHashMap = new HashMap<>();
+    Set<Map<String, String>> credentialsSet = (Set<Map<String, String>>) properties.get(ClusterResourceProvider.CREDENTIALS_PROPERTY_ID);
+    if (credentialsSet != null) {
+      for (Map<String, String> credentialMap : credentialsSet) {
+        String alias = Strings.emptyToNull(credentialMap.get("alias"));
+        if (alias == null) {
+          throw new InvalidTopologyTemplateException("credential.alias property is missing.");
+        }
+        String principal = Strings.emptyToNull(credentialMap.get("principal"));
+        if (principal == null) {
+          throw new InvalidTopologyTemplateException("credential.principal property is missing.");
+        }
+        String key = Strings.emptyToNull(credentialMap.get("key"));
+        if (key == null) {
+          throw new InvalidTopologyTemplateException("credential.key is missing.");
+        }
+        String typeString = Strings.emptyToNull(credentialMap.get("type"));
+        if (typeString == null) {
+          throw new InvalidTopologyTemplateException("credential.type is missing.");
+        }
+        CredentialStoreType type = Enums.getIfPresent(CredentialStoreType.class, typeString.toUpperCase()).orNull();
+        if (type == null) {
+          throw new InvalidTopologyTemplateException("credential.type is invalid.");
+        }
+        credentialHashMap.put(alias, new Credential(alias, principal, key, type));
+      }
+    }
+    return credentialHashMap;
+  }
+
+  public Map<String, Credential> getCredentialsMap() {
+    return credentialsMap;
   }
 
   public String getClusterName() {

http://git-wip-us.apache.org/repos/asf/ambari/blob/6e67b5e5/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/BlueprintEntity.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/BlueprintEntity.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/BlueprintEntity.java
index ada924a..8578d6b 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/BlueprintEntity.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/BlueprintEntity.java
@@ -18,21 +18,21 @@
 
 package org.apache.ambari.server.orm.entities;
 
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.Map;
-import com.google.gson.Gson;
+import org.apache.ambari.server.state.SecurityType;
 
+import javax.persistence.Basic;
 import javax.persistence.CascadeType;
 import javax.persistence.Column;
 import javax.persistence.Entity;
+import javax.persistence.EnumType;
+import javax.persistence.Enumerated;
 import javax.persistence.Id;
 import javax.persistence.JoinColumn;
 import javax.persistence.NamedQuery;
 import javax.persistence.OneToMany;
 import javax.persistence.OneToOne;
 import javax.persistence.Table;
-import javax.persistence.Transient;
+import java.util.Collection;
 
 
 /**
@@ -49,6 +49,14 @@ public class BlueprintEntity {
       updatable = false, unique = true, length = 100)
   private String blueprintName;
 
+  @Basic
+  @Enumerated(value = EnumType.STRING)
+  @Column(name = "security_type", nullable = false, insertable = true, updatable = true)
+  private SecurityType securityType = SecurityType.NONE;
+
+  @Basic
+  @Column(name = "security_descriptor_reference", nullable = true, insertable = true, updatable = true)
+  private String securityDescriptorReference;
 
   /**
    * Unidirectional one-to-one association to {@link StackEntity}
@@ -136,4 +144,20 @@ public class BlueprintEntity {
   public void setConfigurations(Collection<BlueprintConfigEntity> configurations) {
     this.configurations = configurations;
   }
+
+  public SecurityType getSecurityType() {
+    return securityType;
+  }
+
+  public void setSecurityType(SecurityType securityType) {
+    this.securityType = securityType;
+  }
+
+  public String getSecurityDescriptorReference() {
+    return securityDescriptorReference;
+  }
+
+  public void setSecurityDescriptorReference(String securityDescriptorReference) {
+    this.securityDescriptorReference = securityDescriptorReference;
+  }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/6e67b5e5/ambari-server/src/main/java/org/apache/ambari/server/state/Clusters.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/Clusters.java b/ambari-server/src/main/java/org/apache/ambari/server/state/Clusters.java
index 3aa27dd..a1ebaba 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/Clusters.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/Clusters.java
@@ -18,12 +18,12 @@
 
 package org.apache.ambari.server.state;
 
+import org.apache.ambari.server.AmbariException;
+
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
 
-import org.apache.ambari.server.AmbariException;
-
 /**
  * Single entity that tracks all clusters and hosts that are managed
  * by the Ambari server
@@ -32,7 +32,6 @@ public interface Clusters {
 
   /**
    * Add a new Cluster
-   * 
    * @param clusterName
    *          the cluster name (not {@code null}).
    * @param stackId
@@ -42,6 +41,19 @@ public interface Clusters {
       throws AmbariException;
 
   /**
+   * Add a new cluster
+   * @param clusterName
+   *          the cluster name (not {@code null}).
+   * @param stackId
+   *          the stack for the cluster (not {@code null}).
+   * @param securityType
+   *          the cluster will be created with this security type.
+   * @throws AmbariException
+   */
+  public void addCluster(String clusterName, StackId stackId, SecurityType securityType)
+    throws AmbariException;
+
+  /**
    * Gets the Cluster given the cluster name
    * @param clusterName Name of the Cluster to retrieve
    * @return  <code>Cluster</code> identified by the given name

http://git-wip-us.apache.org/repos/asf/ambari/blob/6e67b5e5/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClustersImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClustersImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClustersImpl.java
index 4040c5f..a89fb91 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClustersImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClustersImpl.java
@@ -18,20 +18,9 @@
 
 package org.apache.ambari.server.state.cluster;
 
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.locks.Lock;
-import java.util.concurrent.locks.ReentrantReadWriteLock;
-
-import javax.persistence.RollbackException;
-
+import com.google.inject.Inject;
+import com.google.inject.Singleton;
+import com.google.inject.persist.Transactional;
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.ClusterNotFoundException;
 import org.apache.ambari.server.DuplicateResourceException;
@@ -73,6 +62,7 @@ import org.apache.ambari.server.state.HostHealthStatus;
 import org.apache.ambari.server.state.HostHealthStatus.HealthStatus;
 import org.apache.ambari.server.state.HostState;
 import org.apache.ambari.server.state.RepositoryInfo;
+import org.apache.ambari.server.state.SecurityType;
 import org.apache.ambari.server.state.StackId;
 import org.apache.ambari.server.state.configgroup.ConfigGroup;
 import org.apache.ambari.server.state.host.HostFactory;
@@ -80,9 +70,18 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.security.core.GrantedAuthority;
 
-import com.google.inject.Inject;
-import com.google.inject.Singleton;
-import com.google.inject.persist.Transactional;
+import javax.persistence.RollbackException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.locks.Lock;
+import java.util.concurrent.locks.ReentrantReadWriteLock;
 
 @Singleton
 public class ClustersImpl implements Clusters {
@@ -199,6 +198,12 @@ public class ClustersImpl implements Clusters {
 
   @Override
   public void addCluster(String clusterName, StackId stackId)
+    throws AmbariException {
+    addCluster(clusterName, stackId, null);
+  }
+
+  @Override
+  public void addCluster(String clusterName, StackId stackId, SecurityType securityType)
       throws AmbariException {
     checkLoaded();
 
@@ -232,6 +237,9 @@ public class ClustersImpl implements Clusters {
       clusterEntity.setClusterName(clusterName);
       clusterEntity.setDesiredStack(stackEntity);
       clusterEntity.setResource(resourceEntity);
+      if (securityType != null) {
+        clusterEntity.setSecurityType(securityType);
+      }
 
       try {
         clusterDAO.create(clusterEntity);

http://git-wip-us.apache.org/repos/asf/ambari/blob/6e67b5e5/ambari-server/src/main/java/org/apache/ambari/server/topology/AmbariContext.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/topology/AmbariContext.java b/ambari-server/src/main/java/org/apache/ambari/server/topology/AmbariContext.java
index 0cf3bf2..5e93aeb 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/topology/AmbariContext.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/topology/AmbariContext.java
@@ -18,15 +18,6 @@
 
 package org.apache.ambari.server.topology;
 
-import java.util.Collection;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.concurrent.atomic.AtomicLong;
-
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.ClusterNotFoundException;
 import org.apache.ambari.server.Role;
@@ -68,6 +59,15 @@ import org.apache.ambari.server.state.configgroup.ConfigGroup;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.atomic.AtomicLong;
+
 /**
  * Provides topology related information as well as access to the core Ambari functionality.
  */
@@ -136,15 +136,15 @@ public class AmbariContext {
     return getController().getActionManager().getTaskById(id);
   }
 
-  public void createAmbariResources(ClusterTopology topology, String clusterName) {
+  public void createAmbariResources(ClusterTopology topology, String clusterName, SecurityType securityType) {
     Stack stack = topology.getBlueprint().getStack();
-    createAmbariClusterResource(clusterName, stack.getName(), stack.getVersion());
+    createAmbariClusterResource(clusterName, stack.getName(), stack.getVersion(), securityType);
     createAmbariServiceAndComponentResources(topology, clusterName);
   }
 
-  public void createAmbariClusterResource(String clusterName, String stackName, String stackVersion) {
+  public void createAmbariClusterResource(String clusterName, String stackName, String stackVersion, SecurityType securityType) {
     String stackInfo = String.format("%s-%s", stackName, stackVersion);
-    ClusterRequest clusterRequest = new ClusterRequest(null, clusterName, stackInfo, null);
+    ClusterRequest clusterRequest = new ClusterRequest(null, clusterName, null, securityType, stackInfo, null);
     try {
       getController().createCluster(clusterRequest);
     } catch (AmbariException e) {

http://git-wip-us.apache.org/repos/asf/ambari/blob/6e67b5e5/ambari-server/src/main/java/org/apache/ambari/server/topology/Blueprint.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/topology/Blueprint.java b/ambari-server/src/main/java/org/apache/ambari/server/topology/Blueprint.java
index fa65022..11311db 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/topology/Blueprint.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/topology/Blueprint.java
@@ -103,6 +103,8 @@ public interface Blueprint {
    */
   public Collection<HostGroup> getHostGroupsForComponent(String component);
 
+  public SecurityConfiguration getSecurity();
+
   /**
    * Validate the blueprint topology.
    *

http://git-wip-us.apache.org/repos/asf/ambari/blob/6e67b5e5/ambari-server/src/main/java/org/apache/ambari/server/topology/BlueprintFactory.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/topology/BlueprintFactory.java b/ambari-server/src/main/java/org/apache/ambari/server/topology/BlueprintFactory.java
index 210504d..b8ce749 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/topology/BlueprintFactory.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/topology/BlueprintFactory.java
@@ -22,7 +22,6 @@ package org.apache.ambari.server.topology;
 import com.google.inject.Inject;
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.ObjectNotFoundException;
-import org.apache.ambari.server.StackAccessException;
 import org.apache.ambari.server.controller.AmbariManagementController;
 import org.apache.ambari.server.controller.AmbariServer;
 import org.apache.ambari.server.controller.internal.Stack;
@@ -88,10 +87,11 @@ public class BlueprintFactory {
    * Convert a map of properties to a blueprint entity.
    *
    * @param properties  property map
+   * @param securityConfiguration security related properties
    * @return new blueprint entity
    */
   @SuppressWarnings("unchecked")
-  public Blueprint createBlueprint(Map<String, Object> properties) throws NoSuchStackException {
+  public Blueprint createBlueprint(Map<String, Object> properties, SecurityConfiguration securityConfiguration) throws NoSuchStackException {
     String name = String.valueOf(properties.get(BLUEPRINT_NAME_PROPERTY_ID));
     // String.valueOf() will return "null" if value is null
     if (name.equals("null") || name.isEmpty()) {
@@ -104,7 +104,7 @@ public class BlueprintFactory {
     Configuration configuration = configFactory.getConfiguration((Collection<Map<String, String>>)
         properties.get(CONFIGURATION_PROPERTY_ID));
 
-    return new BlueprintImpl(name, hostGroups, stack, configuration);
+    return new BlueprintImpl(name, hostGroups, stack, configuration, securityConfiguration);
   }
 
   protected Stack createStack(Map<String, Object> properties) throws NoSuchStackException {

http://git-wip-us.apache.org/repos/asf/ambari/blob/6e67b5e5/ambari-server/src/main/java/org/apache/ambari/server/topology/BlueprintImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/topology/BlueprintImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/topology/BlueprintImpl.java
index 481d217..88052b0 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/topology/BlueprintImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/topology/BlueprintImpl.java
@@ -19,12 +19,6 @@
 
 package org.apache.ambari.server.topology;
 
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Map;
-
 import com.google.gson.Gson;
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.StackAccessException;
@@ -39,6 +33,12 @@ import org.apache.ambari.server.orm.entities.HostGroupEntity;
 import org.apache.ambari.server.orm.entities.StackEntity;
 import org.apache.ambari.server.stack.NoSuchStackException;
 
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map;
+
 /**
  * Blueprint implementation.
  */
@@ -49,10 +49,14 @@ public class BlueprintImpl implements Blueprint {
   private Stack stack;
   private Configuration configuration;
   private BlueprintValidator validator;
-
+  private SecurityConfiguration security;
 
   public BlueprintImpl(BlueprintEntity entity) throws NoSuchStackException {
     this.name = entity.getBlueprintName();
+    if (entity.getSecurityType() != null) {
+      this.security = new SecurityConfiguration(entity.getSecurityType(), entity.getSecurityDescriptorReference(),
+        null);
+    }
 
     parseStack(entity.getStack());
 
@@ -63,9 +67,10 @@ public class BlueprintImpl implements Blueprint {
     validator = new BlueprintValidatorImpl(this);
   }
 
-  public BlueprintImpl(String name, Collection<HostGroup> groups, Stack stack, Configuration configuration) {
+  public BlueprintImpl(String name, Collection<HostGroup> groups, Stack stack, Configuration configuration, SecurityConfiguration security) {
     this.name = name;
     this.stack = stack;
+    this.security = security;
 
     // caller should set host group configs
     for (HostGroup hostGroup : groups) {
@@ -91,6 +96,10 @@ public class BlueprintImpl implements Blueprint {
     return stack.getVersion();
   }
 
+  public SecurityConfiguration getSecurity() {
+    return security;
+  }
+
   //todo: safe copy?
   @Override
   public Map<String, HostGroup> getHostGroups() {
@@ -182,6 +191,14 @@ public class BlueprintImpl implements Blueprint {
 
     BlueprintEntity entity = new BlueprintEntity();
     entity.setBlueprintName(name);
+    if (security != null) {
+      if (security.getType() != null) {
+        entity.setSecurityType(security.getType());
+      }
+      if (security.getDescriptorReference() != null) {
+        entity.setSecurityDescriptorReference(security.getDescriptorReference());
+      }
+    }
 
     //todo: not using stackDAO so stackEntity.id is not set
     //todo: this is now being set in BlueprintDAO

http://git-wip-us.apache.org/repos/asf/ambari/blob/6e67b5e5/ambari-server/src/main/java/org/apache/ambari/server/topology/Credential.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/topology/Credential.java b/ambari-server/src/main/java/org/apache/ambari/server/topology/Credential.java
new file mode 100644
index 0000000..2651074
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/topology/Credential.java
@@ -0,0 +1,71 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distribut
+ * ed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.topology;
+
+import org.apache.ambari.server.security.encryption.CredentialStoreType;
+
+/**
+ * Holds credential info submitted in a cluster create template.
+ */
+public class Credential {
+
+  /**
+   * Credential alias like kdc.admin.credential.
+   */
+  private String alias;
+
+  /**
+   * Name of a principal.
+   */
+  private String principal;
+
+  /**
+   * Key of credential.
+   */
+  private String key;
+
+  /**
+   * Type of credential store.
+   */
+  private CredentialStoreType type;
+
+  public Credential(String alias, String principal, String key, CredentialStoreType type) {
+    this.alias = alias;
+    this.principal = principal;
+    this.key = key;
+    this.type = type;
+  }
+
+  public String getAlias() {
+    return alias;
+  }
+
+  public String getPrincipal() {
+    return principal;
+  }
+
+  public String getKey() {
+    return key;
+  }
+
+  public CredentialStoreType getType() {
+    return type;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/6e67b5e5/ambari-server/src/main/java/org/apache/ambari/server/topology/SecurityConfiguration.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/topology/SecurityConfiguration.java b/ambari-server/src/main/java/org/apache/ambari/server/topology/SecurityConfiguration.java
new file mode 100644
index 0000000..b35f7b3
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/topology/SecurityConfiguration.java
@@ -0,0 +1,67 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distribut
+ * ed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.topology;
+
+import org.apache.ambari.server.state.SecurityType;
+
+/**
+ * Holds security related properties, the securityType and security descriptor (in case of KERBEROS
+ * kerberos_descriptor) either contains the whole descriptor or just the reference to it.
+ *
+ */
+public class SecurityConfiguration {
+
+  /**
+   * Security Type
+   */
+  private SecurityType type;
+
+  /**
+   * Holds a reference to a kerberos_descriptor resource.
+   */
+  private String descriptorReference;
+
+  /**
+   * Content of a kerberos_descriptor as String.
+   */
+  private String descriptor;
+
+  public SecurityConfiguration(SecurityType type) {
+    this.type = type;
+  }
+
+  public SecurityConfiguration(SecurityType type, String descriptorReference, String descriptor) {
+    this.type = type;
+    this.descriptorReference = descriptorReference;
+    this.descriptor = descriptor;
+  }
+
+  public SecurityType getType() {
+    return type;
+  }
+
+  public String getDescriptor() {
+    return descriptor;
+  }
+
+  public String getDescriptorReference() {
+    return descriptorReference;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/6e67b5e5/ambari-server/src/main/java/org/apache/ambari/server/topology/SecurityConfigurationFactory.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/topology/SecurityConfigurationFactory.java b/ambari-server/src/main/java/org/apache/ambari/server/topology/SecurityConfigurationFactory.java
new file mode 100644
index 0000000..5f8cde1
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/topology/SecurityConfigurationFactory.java
@@ -0,0 +1,175 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distribut
+ * ed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.topology;
+
+import com.google.common.base.Enums;
+import com.google.common.base.Strings;
+import com.google.gson.Gson;
+import com.google.inject.Inject;
+import org.apache.ambari.server.orm.dao.KerberosDescriptorDAO;
+import org.apache.ambari.server.orm.entities.KerberosDescriptorEntity;
+import org.apache.ambari.server.state.SecurityType;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.Map;
+import java.util.UUID;
+
+public class SecurityConfigurationFactory {
+
+  private static final Logger LOGGER = LoggerFactory.getLogger(SecurityConfigurationFactory.class);
+
+  public static final String SECURITY_PROPERTY_ID = "security";
+  public static final String TYPE_PROPERTY_ID = "type";
+  public static final String KERBEROS_DESCRIPTOR_PROPERTY_ID = "kerberos_descriptor";
+  public static final String KERBEROS_DESCRIPTOR_REFERENCE_PROPERTY_ID = "kerberos_descriptor_reference";
+
+  @Inject
+  protected Gson jsonSerializer;
+
+  @Inject
+  private KerberosDescriptorDAO kerberosDescriptorDAO;
+
+  @Inject
+  private KerberosDescriptorFactory kerberosDescriptorFactory;
+
+  public SecurityConfigurationFactory() {
+  }
+
+  protected SecurityConfigurationFactory(Gson jsonSerializer, KerberosDescriptorDAO kerberosDescriptorDAO, KerberosDescriptorFactory kerberosDescriptorFactory) {
+    this.jsonSerializer = jsonSerializer;
+    this.kerberosDescriptorDAO = kerberosDescriptorDAO;
+    this.kerberosDescriptorFactory = kerberosDescriptorFactory;
+  }
+
+  /**
+   * Creates and also validates SecurityConfiguration based on properties parsed from request Json.
+   *
+   * @param properties Security properties from Json parsed into a Map
+   * @param persistEmbeddedDescriptor whether to save embedded descriptor or not
+   * @return
+   */
+  public SecurityConfiguration createSecurityConfigurationFromRequest(Map<String, Object> properties, boolean
+    persistEmbeddedDescriptor) {
+
+    SecurityConfiguration securityConfiguration = null;
+
+    LOGGER.debug("Creating security configuration from properties: {}", properties);
+    Map<String, Object> securityProperties = (Map<String, Object>) properties.get(SECURITY_PROPERTY_ID);
+
+    if (securityProperties == null) {
+      LOGGER.debug("No security information properties provided, returning null");
+      return securityConfiguration;
+    }
+
+    String securityTypeString = Strings.emptyToNull((String) securityProperties.get(TYPE_PROPERTY_ID));
+    if (securityTypeString == null) {
+      LOGGER.error("Type is missing from security block.");
+      throw new IllegalArgumentException("Type missing from security block.");
+    }
+
+    SecurityType securityType = Enums.getIfPresent(SecurityType.class, securityTypeString).orNull();
+    if (securityType == null) {
+      LOGGER.error("Unsupported security type specified: {}", securityType);
+      throw new IllegalArgumentException("Invalid security type specified: " + securityTypeString);
+    }
+
+    if (securityType == SecurityType.KERBEROS) {
+
+      // get security information from the request propertie if any
+      String descriptorReference = Strings.emptyToNull((String)
+          securityProperties.get(KERBEROS_DESCRIPTOR_REFERENCE_PROPERTY_ID));
+
+      Object descriptorJsonMap = securityProperties.get(KERBEROS_DESCRIPTOR_PROPERTY_ID);
+
+      if (descriptorReference == null && descriptorJsonMap == null) {
+        LOGGER.error("Both kerberos descriptor and kerberos descriptor reference are null in the security configuration!");
+        throw new IllegalArgumentException(KERBEROS_DESCRIPTOR_PROPERTY_ID + " or "
+            + KERBEROS_DESCRIPTOR_REFERENCE_PROPERTY_ID + " is required for KERBEROS security setup.");
+      }
+
+      if (descriptorReference != null && descriptorJsonMap != null) {
+        LOGGER.error("Both kerberos descriptor and kerberos descriptor reference are set in the security configuration!");
+        throw new IllegalArgumentException("Usage of properties : " + KERBEROS_DESCRIPTOR_PROPERTY_ID + " and "
+            + KERBEROS_DESCRIPTOR_REFERENCE_PROPERTY_ID + " at the same time, is not allowed.");
+      }
+
+      String descriptorText = null;
+
+      if (descriptorJsonMap != null) { // this means the reference is null
+        LOGGER.debug("Found embedded descriptor: {}", descriptorJsonMap);
+        descriptorText = jsonSerializer.<Map<String, Object>>toJson(descriptorJsonMap, Map.class);
+        if (persistEmbeddedDescriptor) {
+          descriptorReference = persistKerberosDescriptor(descriptorText);
+        }
+        securityConfiguration = new SecurityConfiguration(SecurityType.KERBEROS, descriptorReference, descriptorText);
+      } else { // this means the reference is not null
+        LOGGER.debug("Found descriptor reference: {}", descriptorReference);
+        securityConfiguration = loadSecurityConfigurationByReference(descriptorReference);
+      }
+    } else {
+      LOGGER.debug("There is no security configuration found in the request");
+      securityConfiguration = new SecurityConfiguration(SecurityType.NONE);
+    }
+    return securityConfiguration;
+  }
+
+  public SecurityConfiguration loadSecurityConfigurationByReference(String reference) {
+    SecurityConfiguration securityConfiguration = null;
+    LOGGER.debug("Loading security configuration by reference: {}", reference);
+
+    if (reference == null) {
+      LOGGER.error("No security configuration reference provided!");
+      throw new IllegalArgumentException("No security configuration reference provided!");
+    }
+
+    KerberosDescriptorEntity descriptorEntity = kerberosDescriptorDAO.findByName(reference);
+
+    if (descriptorEntity == null) {
+      LOGGER.error("No security configuration found for the reference: {}", reference);
+      throw new IllegalArgumentException("No security configuration found for the reference: " + reference);
+    }
+
+    securityConfiguration = new SecurityConfiguration(SecurityType.KERBEROS, reference, descriptorEntity.getKerberosDescriptorText());
+
+    return securityConfiguration;
+
+  }
+
+  private String persistKerberosDescriptor(String descriptor) {
+    LOGGER.debug("Generating new kerberos descriptor reference ...");
+    String kdReference = generateKerberosDescriptorReference();
+
+    KerberosDescriptor kerberosDescriptor = kerberosDescriptorFactory.createKerberosDescriptor(kdReference, descriptor);
+
+    LOGGER.debug("Persisting kerberos descriptor ...");
+    kerberosDescriptorDAO.create(kerberosDescriptor.toEntity());
+    return kdReference;
+  }
+
+  // generates a unique name for the kerberos descriptor for further reference
+  private String generateKerberosDescriptorReference() {
+    String kdReference = UUID.randomUUID().toString();
+    LOGGER.debug("Generated new kerberos descriptor reference: {}", kdReference);
+    return kdReference;
+  }
+
+
+}