Posted to commits@ambari.apache.org by js...@apache.org on 2015/04/27 07:53:03 UTC

[09/13] ambari git commit: AMBARI-10750. Initial merge of advanced api provisioning work.

http://git-wip-us.apache.org/repos/asf/ambari/blob/c9f0dd0b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/Stack.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/Stack.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/Stack.java
index 9ef13ba..82d03fd 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/Stack.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/Stack.java
@@ -37,11 +37,14 @@ import org.apache.ambari.server.controller.StackServiceResponse;
 import org.apache.ambari.server.orm.entities.StackEntity;
 import org.apache.ambari.server.state.AutoDeployInfo;
 import org.apache.ambari.server.state.DependencyInfo;
+import org.apache.ambari.server.state.PropertyInfo;
+import org.apache.ambari.server.topology.Cardinality;
+import org.apache.ambari.server.topology.Configuration;
 
 /**
  * Encapsulates stack information.
  */
-class Stack {
+public class Stack {
   /**
    * Stack name
    */
@@ -88,6 +91,10 @@ class Stack {
    */
   private Map<String, String> cardinalityRequirements = new HashMap<String, String>();
 
+  //todo: instead of all these maps from component -> * ,
+  //todo: we should use a Component object with all of these attributes
+  private Set<String> masterComponents = new HashSet<String>();
+
   /**
    * Map of component to auto-deploy information
    */
@@ -101,45 +108,28 @@ class Stack {
       new HashMap<String, Map<String, Map<String, ConfigProperty>>>();
 
   /**
-   * Map of service to set of excluded config types
+   * Map of service to required type properties
    */
-  private Map<String, Set<String>> excludedConfigurationTypes =
-    new HashMap<String, Set<String>>();
+  private Map<String, Map<String, Map<String, ConfigProperty>>> requiredServiceConfigurations =
+      new HashMap<String, Map<String, Map<String, ConfigProperty>>>();
 
   /**
-   * Ambari Management Controller, used to obtain Stack definitions
+   * Map of service to config type properties
    */
-  private final AmbariManagementController ambariManagementController;
+  private Map<String, Map<String, ConfigProperty>> stackConfigurations =
+      new HashMap<String, Map<String, ConfigProperty>>();
 
   /**
-   * Contains a configuration property's value and attributes.
+   * Map of service to set of excluded config types
    */
-  private class ConfigProperty {
-
-    private ConfigProperty(String value, Map<String, String> attributes) {
-      this.value = value;
-      this.attributes = attributes;
-    }
-
-    private String value;
-    private Map<String, String> attributes;
-
-    public String getValue() {
-      return value;
-    }
-
-    public void setValue(String value) {
-      this.value = value;
-    }
+  private Map<String, Set<String>> excludedConfigurationTypes =
+    new HashMap<String, Set<String>>();
 
-    public Map<String, String> getAttributes() {
-      return attributes;
-    }
+  /**
+   * Ambari Management Controller, used to obtain Stack definitions
+   */
+  private final AmbariManagementController controller;
 
-    public void setAttributes(Map<String, String> attributes) {
-      this.attributes = attributes;
-    }
-  }
 
   /**
    * Constructor.
@@ -163,15 +153,16 @@ class Stack {
    * @param name     stack name
    * @param version  stack version
    *
-   * @throws org.apache.ambari.server.AmbariException an exception occurred getting stack information
+   * @throws AmbariException an exception occurred getting stack information
    *                         for the specified name and version
    */
-  public Stack(String name, String version, AmbariManagementController ambariManagementController) throws AmbariException {
+  //todo: don't pass management controller in constructor
+  public Stack(String name, String version, AmbariManagementController controller) throws AmbariException {
     this.name = name;
     this.version = version;
-    this.ambariManagementController = ambariManagementController;
+    this.controller = controller;
 
-    Set<StackServiceResponse> stackServices = ambariManagementController.getStackServices(
+    Set<StackServiceResponse> stackServices = controller.getStackServices(
         Collections.singleton(new StackServiceRequest(name, version, null)));
 
     for (StackServiceResponse stackService : stackServices) {
@@ -181,6 +172,9 @@ class Stack {
       parseConfigurations(serviceName);
       registerConditionalDependencies();
     }
+
+    //todo: already done for each service
+    parseStackConfigurations();
   }
 
   /**
@@ -227,26 +221,57 @@ class Stack {
   }
 
   /**
+   * Get all service components
+   *
+   * @return map of service to associated components
+   */
+  public Map<String, Collection<String>> getComponents() {
+    Map<String, Collection<String>> serviceComponents = new HashMap<String, Collection<String>>();
+    for (String service : getServices()) {
+      Collection<String> components = new HashSet<String>();
+      components.addAll(getComponents(service));
+      serviceComponents.put(service, components);
+    }
+    return serviceComponents;
+  }
+
+  /**
+   * Get all configuration types, including excluded types, for the specified service.
+   *
+   * @param service  service name
+   *
+   * @return collection of all configuration types for the specified service
+   */
+  public Collection<String> getAllConfigurationTypes(String service) {
+    return serviceConfigurations.get(service).keySet();
+  }
+
+  /**
    * Get configuration types for the specified service.
+   * This does not include any of the service's excluded config types.
    *
    * @param service  service name
    *
-   * @return collection of configuration types for the specified service
+   * @return collection of configuration types for the specified service, not including excluded types
    */
   public Collection<String> getConfigurationTypes(String service) {
-    return serviceConfigurations.get(service).keySet();
+    Set<String> serviceTypes = new HashSet<String>(serviceConfigurations.get(service).keySet());
+    serviceTypes.removeAll(getExcludedConfigurationTypes(service));
+
+    return serviceTypes;
   }
 
   /**
-   * Get the set of excluded configuration types
-   *   for this service
+   * Get the set of excluded configuration types for this service.
    *
    * @param service service name
    *
-   * @return Set of names of excluded config types
+   * @return Set of names of excluded config types. Will not return null.
    */
   public Set<String> getExcludedConfigurationTypes(String service) {
-    return excludedConfigurationTypes.get(service);
+    return excludedConfigurationTypes.containsKey(service) ?
+        excludedConfigurationTypes.get(service) :
+        Collections.<String>emptySet();
   }
 
   /**
@@ -269,6 +294,62 @@ class Stack {
   }
 
   /**
+   * Get all required config properties for the specified service.
+   *
+   * @param service  service name
+   *
+   * @return collection of all required properties for the given service
+   */
+  public Collection<ConfigProperty> getRequiredConfigurationProperties(String service) {
+    Collection<ConfigProperty> requiredConfigProperties = new HashSet<ConfigProperty>();
+    Map<String, Map<String, ConfigProperty>> serviceProperties = requiredServiceConfigurations.get(service);
+    if (serviceProperties != null) {
+      for (Map.Entry<String, Map<String, ConfigProperty>> typePropertiesEntry : serviceProperties.entrySet()) {
+        requiredConfigProperties.addAll(typePropertiesEntry.getValue().values());
+      }
+    }
+    return requiredConfigProperties;
+  }
+
+  /**
+   * Get required config properties for the specified service and configuration type.
+   *
+   * @param service  service name
+   * @param type     configuration type
+   *
+   * @return collection of required properties for the given service and type
+   */
+  //todo: change type to PropertyInfo.PropertyType
+  public Collection<ConfigProperty> getRequiredConfigurationProperties(String service, String type) {
+    Collection<ConfigProperty> requiredConfigs = new HashSet<ConfigProperty>();
+    Map<String, ConfigProperty> configProperties = requiredServiceConfigurations.get(service).get(type);
+    if (configProperties != null) {
+      requiredConfigs.addAll(configProperties.values());
+    }
+    return requiredConfigs;
+  }
+
+  public boolean isPasswordProperty(String service, String type, String propertyName) {
+    return (serviceConfigurations.containsKey(service) &&
+            serviceConfigurations.get(service).containsKey(type) &&
+            serviceConfigurations.get(service).get(type).containsKey(propertyName) &&
+            serviceConfigurations.get(service).get(type).get(propertyName).getPropertyTypes().
+                contains(PropertyInfo.PropertyType.PASSWORD));
+  }
+
+  //todo
+  public Map<String, String> getStackConfigurationProperties(String type) {
+    Map<String, String> configMap = new HashMap<String, String>();
+    Map<String, ConfigProperty> configProperties = stackConfigurations.get(type);
+    if (configProperties != null) {
+      for (Map.Entry<String, ConfigProperty> configProperty : configProperties.entrySet()) {
+        configMap.put(configProperty.getKey(), configProperty.getValue().getValue());
+      }
+    }
+    return configMap;
+  }
+
+  /**
    * Get config attributes for the specified service and configuration type.
    *
    * @param service  service name
@@ -288,10 +369,37 @@ class Stack {
           for (Map.Entry<String, String> propertyAttribute : propertyAttributes.entrySet()) {
             String attributeName = propertyAttribute.getKey();
             String attributeValue = propertyAttribute.getValue();
+            if (attributeValue != null) {
+              Map<String, String> attributes = attributesMap.get(attributeName);
+              if (attributes == null) {
+                  attributes = new HashMap<String, String>();
+                  attributesMap.put(attributeName, attributes);
+              }
+              attributes.put(propertyName, attributeValue);
+            }
+          }
+        }
+      }
+    }
+    return attributesMap;
+  }
+
+  //todo:
+  public Map<String, Map<String, String>> getStackConfigurationAttributes(String type) {
+    Map<String, Map<String, String>> attributesMap = new HashMap<String, Map<String, String>>();
+    Map<String, ConfigProperty> configProperties = stackConfigurations.get(type);
+    if (configProperties != null) {
+      for (Map.Entry<String, ConfigProperty> configProperty : configProperties.entrySet()) {
+        String propertyName = configProperty.getKey();
+        Map<String, String> propertyAttributes = configProperty.getValue().getAttributes();
+        if (propertyAttributes != null) {
+          for (Map.Entry<String, String> propertyAttribute : propertyAttributes.entrySet()) {
+            String attributeName = propertyAttribute.getKey();
+            String attributeValue = propertyAttribute.getValue();
             Map<String, String> attributes = attributesMap.get(attributeName);
             if (attributes == null) {
-                attributes = new HashMap<String, String>();
-                attributesMap.put(attributeName, attributes);
+              attributes = new HashMap<String, String>();
+              attributesMap.put(attributeName, attributes);
             }
             attributes.put(propertyName, attributeValue);
           }
@@ -389,17 +497,90 @@ class Stack {
     return componentAutoDeployInfo.get(component);
   }
 
+  public boolean isMasterComponent(String component) {
+    return masterComponents.contains(component);
+  }
+
+  public Configuration getConfiguration(Collection<String> services) {
+    Map<String, Map<String, Map<String, String>>> attributes = new HashMap<String, Map<String, Map<String, String>>>();
+    Map<String, Map<String, String>> properties = new HashMap<String, Map<String, String>>();
+
+    for (String service : services) {
+      Collection<String> serviceConfigTypes = getConfigurationTypes(service);
+      for (String type : serviceConfigTypes) {
+        Map<String, String> typeProps = properties.get(type);
+        if (typeProps == null) {
+          typeProps = new HashMap<String, String>();
+          properties.put(type, typeProps);
+        }
+        typeProps.putAll(getConfigurationProperties(service, type));
+
+        Map<String, Map<String, String>> stackTypeAttributes = getConfigurationAttributes(service, type);
+        if (!stackTypeAttributes.isEmpty()) {
+          if (! attributes.containsKey(type)) {
+            attributes.put(type, new HashMap<String, Map<String, String>>());
+          }
+          Map<String, Map<String, String>> typeAttributes = attributes.get(type);
+          for (Map.Entry<String, Map<String, String>> attribute : stackTypeAttributes.entrySet()) {
+            String attributeName = attribute.getKey();
+            Map<String, String> attributeProps = typeAttributes.get(attributeName);
+            if (attributeProps == null) {
+              attributeProps = new HashMap<String, String>();
+              typeAttributes.put(attributeName, attributeProps);
+            }
+            attributeProps.putAll(attribute.getValue());
+          }
+        }
+      }
+    }
+    return new Configuration(properties, attributes);
+  }
+
+  public Configuration getConfiguration() {
+    Map<String, Map<String, Map<String, String>>> stackAttributes = new HashMap<String, Map<String, Map<String, String>>>();
+    Map<String, Map<String, String>> stackConfigs = new HashMap<String, Map<String, String>>();
+
+    for (String service : getServices()) {
+      for (String type : getAllConfigurationTypes(service)) {
+        Map<String, String> typeProps = stackConfigs.get(type);
+        if (typeProps == null) {
+          typeProps = new HashMap<String, String>();
+          stackConfigs.put(type, typeProps);
+        }
+        typeProps.putAll(getConfigurationProperties(service, type));
+
+        Map<String, Map<String, String>> stackTypeAttributes = getConfigurationAttributes(service, type);
+        if (!stackTypeAttributes.isEmpty()) {
+          if (! stackAttributes.containsKey(type)) {
+            stackAttributes.put(type, new HashMap<String, Map<String, String>>());
+          }
+          Map<String, Map<String, String>> typeAttrs = stackAttributes.get(type);
+          for (Map.Entry<String, Map<String, String>> attribute : stackTypeAttributes.entrySet()) {
+            String attributeName = attribute.getKey();
+            Map<String, String> attributes = typeAttrs.get(attributeName);
+            if (attributes == null) {
+              attributes = new HashMap<String, String>();
+              typeAttrs.put(attributeName, attributes);
+            }
+            attributes.putAll(attribute.getValue());
+          }
+        }
+      }
+    }
+    return new Configuration(stackConfigs, stackAttributes);
+  }
+
   /**
    * Parse components for the specified service from the stack definition.
    *
    * @param service  service name
    *
-   * @throws org.apache.ambari.server.AmbariException an exception occurred getting components from the stack definition
+   * @throws AmbariException an exception occurred getting components from the stack definition
    */
   private void parseComponents(String service) throws AmbariException{
     Collection<String> componentSet = new HashSet<String>();
 
-    Set<StackServiceComponentResponse> components = ambariManagementController.getStackComponents(
+    Set<StackServiceComponentResponse> components = controller.getStackComponents(
         Collections.singleton(new StackServiceComponentRequest(name, version, service, null)));
 
     // stack service components
@@ -417,12 +598,16 @@ class Stack {
       }
 
       // populate component dependencies
-      Collection<DependencyInfo> componentDependencies = BaseBlueprintProcessor.stackInfo.getComponentDependencies(
+      //todo: remove usage of AmbariMetaInfo
+      Collection<DependencyInfo> componentDependencies = controller.getAmbariMetaInfo().getComponentDependencies(
           name, version, service, componentName);
 
       if (componentDependencies != null && ! componentDependencies.isEmpty()) {
         dependencies.put(componentName, componentDependencies);
       }
+      if (component.isMaster()) {
+        masterComponents.add(componentName);
+      }
     }
     serviceComponents.put(service, componentSet);
   }
@@ -432,19 +617,22 @@ class Stack {
    *
    * @param service  service name
    *
-   * @throws org.apache.ambari.server.AmbariException an exception occurred getting configurations from the stack definition
+   * @throws AmbariException an exception occurred getting configurations from the stack definition
    */
   private void parseConfigurations(String service) throws AmbariException {
     Map<String, Map<String, ConfigProperty>> mapServiceConfig = new HashMap<String, Map<String, ConfigProperty>>();
+    Map<String, Map<String, ConfigProperty>> mapRequiredServiceConfig = new HashMap<String, Map<String, ConfigProperty>>();
 
     serviceConfigurations.put(service, mapServiceConfig);
+    requiredServiceConfigurations.put(service, mapRequiredServiceConfig);
 
-    Set<StackConfigurationResponse> serviceConfigs = ambariManagementController.getStackConfigurations(
+    Set<StackConfigurationResponse> serviceConfigs = controller.getStackConfigurations(
         Collections.singleton(new StackConfigurationRequest(name, version, service, null)));
-    Set<StackConfigurationResponse> stackLevelConfigs = ambariManagementController.getStackLevelConfigurations(
+    Set<StackConfigurationResponse> stackLevelConfigs = controller.getStackLevelConfigurations(
         Collections.singleton(new StackLevelConfigurationRequest(name, version, null)));
     serviceConfigs.addAll(stackLevelConfigs);
 
+    // shouldn't have any required properties in stack level configuration
     for (StackConfigurationResponse config : serviceConfigs) {
       String type = config.getType();
       //strip .xml from type
@@ -456,8 +644,37 @@ class Stack {
         mapTypeConfig = new HashMap<String, ConfigProperty>();
         mapServiceConfig.put(type, mapTypeConfig);
       }
+      ConfigProperty property = new ConfigProperty(config);
+      mapTypeConfig.put(config.getPropertyName(), property);
+      if (config.isRequired()) {
+        Map<String, ConfigProperty> requiredTypeConfig = mapRequiredServiceConfig.get(type);
+        if (requiredTypeConfig == null) {
+          requiredTypeConfig = new HashMap<String, ConfigProperty>();
+          mapRequiredServiceConfig.put(type, requiredTypeConfig);
+        }
+        requiredTypeConfig.put(config.getPropertyName(), property);
+      }
+    }
+  }
+
+  private void parseStackConfigurations () throws AmbariException {
+
+    Set<StackConfigurationResponse> stackLevelConfigs = controller.getStackLevelConfigurations(
+        Collections.singleton(new StackLevelConfigurationRequest(name, version, null)));
+
+    for (StackConfigurationResponse config : stackLevelConfigs) {
+      String type = config.getType();
+      //strip .xml from type
+      if (type.endsWith(".xml")) {
+        type = type.substring(0, type.length() - 4);
+      }
+      Map<String, ConfigProperty> mapTypeConfig = stackConfigurations.get(type);
+      if (mapTypeConfig == null) {
+        mapTypeConfig = new HashMap<String, ConfigProperty>();
+        stackConfigurations.put(type, mapTypeConfig);
+      }
       mapTypeConfig.put(config.getPropertyName(),
-          new ConfigProperty(config.getPropertyValue(), config.getPropertyAttributes()));
+          new ConfigProperty(config));
     }
   }
 
@@ -477,4 +694,61 @@ class Stack {
   void registerConditionalDependencies() {
     dbDependencyInfo.put("MYSQL_SERVER", "global/hive_database");
   }
+
+  /**
+   * Contains a configuration property's value and attributes.
+   */
+  public static class ConfigProperty {
+    private String name;
+    private String value;
+    private Map<String, String> attributes;
+    private Set<PropertyInfo.PropertyType> propertyTypes;
+    private String type;
+
+    private ConfigProperty(StackConfigurationResponse config) {
+      this.name = config.getPropertyName();
+      this.value = config.getPropertyValue();
+      this.attributes = config.getPropertyAttributes();
+      this.propertyTypes = config.getPropertyType();
+      this.type = config.getType();
+    }
+
+    public ConfigProperty(String type, String name, String value) {
+      this.type = type;
+      this.name = name;
+      this.value = value;
+    }
+
+    public String getName() {
+      return name;
+    }
+
+    public String getValue() {
+      return value;
+    }
+
+    public void setValue(String value) {
+      this.value = value;
+    }
+
+    public String getType() {
+      return type;
+    }
+
+    public Set<PropertyInfo.PropertyType> getPropertyTypes() {
+      return propertyTypes;
+    }
+
+    public void setPropertyTypes(Set<PropertyInfo.PropertyType> propertyTypes) {
+      this.propertyTypes = propertyTypes;
+    }
+
+    public Map<String, String> getAttributes() {
+      return attributes;
+    }
+
+    public void setAttributes(Map<String, String> attributes) {
+      this.attributes = attributes;
+    }
+  }
 }
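
For orientation, a minimal sketch of how the now-public Stack API above might be used; the stack name, version, service and property names are illustrative assumptions, and the controller is obtained the same way BlueprintFactory does further below:

    // Illustrative only -- "HDP", "2.2", "HIVE", "hive-site" etc. are placeholders.
    Stack stack = new Stack("HDP", "2.2", AmbariServer.getController());

    // Stack-wide defaults for every service, merged into one Configuration.
    Configuration stackDefaults = stack.getConfiguration();

    // Defaults restricted to the services a blueprint actually declares.
    Configuration serviceDefaults = stack.getConfiguration(Arrays.asList("HDFS", "HIVE"));

    // Required properties and password detection now come straight from the stack.
    Collection<Stack.ConfigProperty> required = stack.getRequiredConfigurationProperties("HIVE");
    boolean isPassword = stack.isPasswordProperty("HIVE", "hive-site", "javax.jdo.option.ConnectionPassword");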

http://git-wip-us.apache.org/repos/asf/ambari/blob/c9f0dd0b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/StageResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/StageResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/StageResourceProvider.java
index fd6b751..664fae3 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/StageResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/StageResourceProvider.java
@@ -54,6 +54,7 @@ import org.apache.ambari.server.orm.entities.HostRoleCommandEntity;
 import org.apache.ambari.server.orm.entities.StageEntity;
 import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.Clusters;
+import org.apache.ambari.server.topology.TopologyManager;
 
 /**
  * ResourceProvider for Stage
@@ -81,6 +82,9 @@ public class StageResourceProvider extends AbstractControllerResourceProvider im
   @Inject
   private static Provider<Clusters> clustersProvider = null;
 
+  @Inject
+  private static TopologyManager topologyManager;
+
   /**
    * Stage property constants.
    */
@@ -140,6 +144,9 @@ public class StageResourceProvider extends AbstractControllerResourceProvider im
     manualTransitionMap.put(HostRoleStatus.HOLDING, EnumSet.of(HostRoleStatus.COMPLETED, HostRoleStatus.ABORTED));
     manualTransitionMap.put(HostRoleStatus.HOLDING_FAILED, EnumSet.of(HostRoleStatus.PENDING, HostRoleStatus.FAILED, HostRoleStatus.ABORTED));
     manualTransitionMap.put(HostRoleStatus.HOLDING_TIMEDOUT, EnumSet.of(HostRoleStatus.PENDING, HostRoleStatus.TIMEDOUT, HostRoleStatus.ABORTED));
+    //todo: perhaps add a CANCELED status that just affects a stage and won't abort the request
+    //todo: so, if I scale 10 nodes and actually provision 5 and then later decide I don't want those
+    //todo: additional 5 nodes I can cancel them and the corresponding request will have a status of COMPLETED
   }
 
 
@@ -224,9 +231,16 @@ public class StageResourceProvider extends AbstractControllerResourceProvider im
     for (StageEntity entity : entities) {
       results.add(toResource(cache, entity, propertyIds));
     }
-
     cache.clear();
 
+    Collection<StageEntity> topologyManagerStages = topologyManager.getStages();
+    for (StageEntity entity : topologyManagerStages) {
+      Resource stageResource = toResource(entity, propertyIds);
+      if (predicate.evaluate(stageResource)) {
+        results.add(stageResource);
+      }
+    }
+
     return results;
   }
 
@@ -363,6 +377,60 @@ public class StageResourceProvider extends AbstractControllerResourceProvider im
   }
 
   /**
+   * Converts the {@link StageEntity} to a {@link Resource}.
+   *
+   * @param entity        the entity to convert (not {@code null})
+   * @param requestedIds  the properties requested (not {@code null})
+   *
+   * @return the new resource
+   */
+  //todo: almost exactly the same as other toResource except how summaries are obtained
+  //todo: refactor to combine the two with the summary logic extracted
+  private Resource toResource(StageEntity entity, Set<String> requestedIds) {
+
+    Resource resource = new ResourceImpl(Resource.Type.Stage);
+
+    Long clusterId = entity.getClusterId();
+    if (clusterId != null && !clusterId.equals(Long.valueOf(-1L))) {
+      try {
+        Cluster cluster = clusters.getClusterById(clusterId);
+
+        setResourceProperty(resource, STAGE_CLUSTER_NAME, cluster.getClusterName(), requestedIds);
+      } catch (Exception e) {
+        LOG.error("Can not get information for cluster " + clusterId + ".", e );
+      }
+    }
+
+    Map<Long, HostRoleCommandStatusSummaryDTO> summary =
+        topologyManager.getStageSummaries(entity.getRequestId());
+
+    setResourceProperty(resource, STAGE_STAGE_ID, entity.getStageId(), requestedIds);
+    setResourceProperty(resource, STAGE_REQUEST_ID, entity.getRequestId(), requestedIds);
+    setResourceProperty(resource, STAGE_CONTEXT, entity.getRequestContext(), requestedIds);
+    setResourceProperty(resource, STAGE_CLUSTER_HOST_INFO, entity.getClusterHostInfo(), requestedIds);
+    setResourceProperty(resource, STAGE_COMMAND_PARAMS, entity.getCommandParamsStage(), requestedIds);
+    setResourceProperty(resource, STAGE_HOST_PARAMS, entity.getHostParamsStage(), requestedIds);
+    setResourceProperty(resource, STAGE_SKIPPABLE, entity.isSkippable(), requestedIds);
+
+    Long startTime = Long.MAX_VALUE;
+    Long endTime = 0L;
+    if (summary.containsKey(entity.getStageId())) {
+      startTime = summary.get(entity.getStageId()).getStartTime();
+      endTime = summary.get(entity.getStageId()).getEndTime();
+    }
+
+    setResourceProperty(resource, STAGE_START_TIME, startTime, requestedIds);
+    setResourceProperty(resource, STAGE_END_TIME, endTime, requestedIds);
+
+    CalculatedStatus status = CalculatedStatus.statusFromStageSummary(summary, Collections.singleton(entity.getStageId()));
+
+    setResourceProperty(resource, STAGE_PROGRESS_PERCENT, status.getPercent(), requestedIds);
+    setResourceProperty(resource, STAGE_STATUS, status.getStatus().toString(), requestedIds);
+
+    return resource;
+  }
+
+  /**
    * Ensure that cluster information is available.
    *
    * @return the clusters information

http://git-wip-us.apache.org/repos/asf/ambari/blob/c9f0dd0b/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/BlueprintDAO.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/BlueprintDAO.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/BlueprintDAO.java
index 9b58422..8c14a29 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/BlueprintDAO.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/BlueprintDAO.java
@@ -25,6 +25,7 @@ import com.google.inject.Singleton;
 import com.google.inject.persist.Transactional;
 import org.apache.ambari.server.orm.RequiresSession;
 import org.apache.ambari.server.orm.entities.BlueprintEntity;
+import org.apache.ambari.server.orm.entities.StackEntity;
 
 import javax.persistence.EntityManager;
 import javax.persistence.TypedQuery;
@@ -43,6 +44,9 @@ public class BlueprintDAO {
   @Inject
   Provider<EntityManager> entityManagerProvider;
 
+  @Inject
+  StackDAO stackDAO;
+
   /**
    * Find a blueprint with a given name.
    *
@@ -76,6 +80,7 @@ public class BlueprintDAO {
    */
   @Transactional
   public void refresh(BlueprintEntity blueprintEntity) {
+    ensureStackIdSet(blueprintEntity);
     entityManagerProvider.get().refresh(blueprintEntity);
   }
 
@@ -86,6 +91,7 @@ public class BlueprintDAO {
    */
   @Transactional
   public void create(BlueprintEntity blueprintEntity) {
+    ensureStackIdSet(blueprintEntity);
     entityManagerProvider.get().persist(blueprintEntity);
   }
 
@@ -97,6 +103,7 @@ public class BlueprintDAO {
    */
   @Transactional
   public BlueprintEntity merge(BlueprintEntity blueprintEntity) {
+    ensureStackIdSet(blueprintEntity);
     return entityManagerProvider.get().merge(blueprintEntity);
   }
 
@@ -107,6 +114,7 @@ public class BlueprintDAO {
    */
   @Transactional
   public void remove(BlueprintEntity blueprintEntity) {
+    ensureStackIdSet(blueprintEntity);
     entityManagerProvider.get().remove(merge(blueprintEntity));
   }
 
@@ -118,4 +126,11 @@ public class BlueprintDAO {
   public void removeByName(String blueprint_name) {
     entityManagerProvider.get().remove(findByName(blueprint_name));
   }
+
+  private void ensureStackIdSet(BlueprintEntity entity) {
+    StackEntity stack = entity.getStack();
+    if (stack != null && stack.getStackId() == null) {
+      entity.setStack(stackDAO.find(stack.getStackName(), stack.getStackVersion()));
+    }
+  }
 }
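
A short illustration of the case ensureStackIdSet() handles: an entity arriving with only the stack name and version populated. The entity setters shown are assumed from the existing JPA classes, and "HDP"/"2.2" are placeholder values:

    // The incoming entity references a stack by name/version only.
    StackEntity requestedStack = new StackEntity();
    requestedStack.setStackName("HDP");
    requestedStack.setStackVersion("2.2");

    BlueprintEntity blueprint = new BlueprintEntity();
    blueprint.setBlueprintName("example-blueprint");
    blueprint.setStack(requestedStack);            // no stack_id resolved yet

    // create() now resolves the managed StackEntity via stackDAO.find(name, version)
    // before persisting, so the stack foreign key is populated.
    blueprintDAO.create(blueprint);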

http://git-wip-us.apache.org/repos/asf/ambari/blob/c9f0dd0b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/BlueprintEntity.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/BlueprintEntity.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/BlueprintEntity.java
index 71a64af..21813ba 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/BlueprintEntity.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/BlueprintEntity.java
@@ -19,10 +19,9 @@
 package org.apache.ambari.server.orm.entities;
 
 import java.util.Collection;
-import java.util.Collections;
 import java.util.HashMap;
-import java.util.HashSet;
 import java.util.Map;
+import com.google.gson.Gson;
 
 import javax.persistence.CascadeType;
 import javax.persistence.Column;
@@ -35,12 +34,6 @@ import javax.persistence.OneToOne;
 import javax.persistence.Table;
 import javax.persistence.Transient;
 
-import org.apache.ambari.server.AmbariException;
-import org.apache.ambari.server.api.services.AmbariMetaInfo;
-import org.apache.ambari.server.state.PropertyInfo;
-import org.apache.ambari.server.state.ServiceInfo;
-
-import com.google.gson.Gson;
 
 /**
  * Entity representing a Blueprint.
@@ -71,7 +64,7 @@ public class BlueprintEntity {
   private Collection<BlueprintConfigEntity> configurations;
 
   @Transient
-  private Gson jsonSerializer = new Gson();
+  private static Gson jsonSerializer = new Gson();
 
 
   /**
@@ -146,116 +139,4 @@ public class BlueprintEntity {
   public void setConfigurations(Collection<BlueprintConfigEntity> configurations) {
     this.configurations = configurations;
   }
-
-  /**
-   * Validate all configurations.  Validation is done on the operational configuration of each
-   * host group.  An operational configuration is achieved by overlaying host group configuration
-   * on top of cluster configuration which overlays the default stack configurations.
-   *
-   * @param stackInfo          stack information
-   * @param validatePasswords  whether password properties should be validated
-   * @return map of required properties which are missing.  Empty map if none are missing.
-   *
-   * @throws IllegalArgumentException if blueprint contains invalid information
-   */
-  public Map<String, Map<String, Collection<String>>> validateConfigurations(
-      AmbariMetaInfo stackInfo, boolean validatePasswords) {
-
-    StackEntity stack = getStack();
-    String stackName = stack.getStackName();
-    String stackVersion = stack.getStackVersion();
-
-    Map<String, Map<String, Collection<String>>> missingProperties =
-        new HashMap<String, Map<String, Collection<String>>>();
-    Map<String, Map<String, String>> clusterConfigurations = getConfigurationAsMap(getConfigurations());
-
-    for (HostGroupEntity hostGroup : getHostGroups()) {
-      Collection<String> processedServices = new HashSet<String>();
-      Map<String, Collection<String>> allRequiredProperties = new HashMap<String, Collection<String>>();
-      Map<String, Map<String, String>> operationalConfiguration =
-          new HashMap<String, Map<String, String>>(clusterConfigurations);
-
-      operationalConfiguration.putAll(getConfigurationAsMap(hostGroup.getConfigurations()));
-      for (HostGroupComponentEntity component : hostGroup.getComponents()) {
-        //check that MYSQL_SERVER component is not available while hive using existing db
-        if (component.getName().equals("MYSQL_SERVER")) {
-          Map<String, String> hiveEnvConfig = clusterConfigurations.get("hive-env");
-          if (hiveEnvConfig != null && !hiveEnvConfig.isEmpty() && hiveEnvConfig.get("hive_database") != null
-                  && hiveEnvConfig.get("hive_database").startsWith("Existing")) {
-            throw new IllegalArgumentException("Incorrect configuration: MYSQL_SERVER component is available but hive" +
-                    " using existing db!");
-          }
-        }
-
-        //for now, AMBARI is not recognized as a service in Stacks
-        if (! component.getName().equals("AMBARI_SERVER")) {
-          ServiceInfo service;
-          String serviceName;
-          try {
-            serviceName = stackInfo.getComponentToService(stackName, stackVersion, component.getName());
-            service = stackInfo.getService(stackName, stackVersion, serviceName);
-          } catch (AmbariException e) {
-            throw new IllegalArgumentException("Unable to determine the service associated with the" +
-                " component: " + component.getName());
-          }
-          if (processedServices.add(serviceName)) {
-            Map<String, PropertyInfo> serviceRequirements = service.getRequiredProperties();
-            for (PropertyInfo propertyInfo : serviceRequirements.values()) {
-              if (! (validatePasswords ^ propertyInfo.getPropertyTypes().contains(PropertyInfo.PropertyType.PASSWORD))) {
-                String configCategory = propertyInfo.getFilename();
-                if (configCategory.endsWith(".xml")) {
-                  configCategory = configCategory.substring(0, configCategory.indexOf(".xml"));
-                }
-                Collection<String> typeRequirements = allRequiredProperties.get(configCategory);
-                if (typeRequirements == null) {
-                  typeRequirements = new HashSet<String>();
-                  allRequiredProperties.put(configCategory, typeRequirements);
-                }
-                typeRequirements.add(propertyInfo.getName());
-              }
-            }
-          }
-        }
-      }
-      for (Map.Entry<String, Collection<String>> requiredTypeProperties : allRequiredProperties.entrySet()) {
-        String requiredCategory = requiredTypeProperties.getKey();
-        Collection<String> requiredProperties = requiredTypeProperties.getValue();
-        Collection<String> operationalTypeProps = operationalConfiguration.containsKey(requiredCategory) ?
-            operationalConfiguration.get(requiredCategory).keySet() :
-            Collections.<String>emptyList();
-
-        requiredProperties.removeAll(operationalTypeProps);
-        if (! requiredProperties.isEmpty()) {
-          String hostGroupName = hostGroup.getName();
-          Map<String, Collection<String>> hostGroupMissingProps = missingProperties.get(hostGroupName);
-          if (hostGroupMissingProps == null) {
-            hostGroupMissingProps = new HashMap<String, Collection<String>>();
-            missingProperties.put(hostGroupName, hostGroupMissingProps);
-          }
-          hostGroupMissingProps.put(requiredCategory, requiredProperties);
-        }
-      }
-    }
-    return missingProperties;
-  }
-
-  /**
-   * Obtain configuration as a map of config type to corresponding properties.
-   *
-   * @param configurations  configuration to include in map
-   *
-   * @return map of config type to map of properties
-   */
-  private Map<String, Map<String, String>> getConfigurationAsMap(
-      Collection<? extends BlueprintConfiguration> configurations) {
-
-    Map<String, Map<String, String>> properties = new HashMap<String, Map<String, String>>();
-    for (BlueprintConfiguration config : configurations) {
-      String type = config.getType();
-      Map<String, String> typeProperties = jsonSerializer.<Map<String, String>>fromJson(
-          config.getConfigData(), Map.class);
-      properties.put(type, typeProperties);
-    }
-    return properties;
-  }
 }
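
The deleted javadoc above describes the "operational" configuration as host group configuration overlaid on cluster configuration, which in turn overlays the stack defaults. A rough sketch of that precedence, using three hypothetical property maps:

    // Later putAll() calls win, mirroring the overlay order from the removed javadoc:
    // stack defaults < cluster-level configuration < host group configuration.
    Map<String, String> operational = new HashMap<String, String>();
    operational.putAll(stackDefaultProps);     // lowest precedence
    operational.putAll(clusterProps);          // overrides stack defaults
    operational.putAll(hostGroupProps);        // highest precedence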

http://git-wip-us.apache.org/repos/asf/ambari/blob/c9f0dd0b/ambari-server/src/main/java/org/apache/ambari/server/stack/NoSuchStackException.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/stack/NoSuchStackException.java b/ambari-server/src/main/java/org/apache/ambari/server/stack/NoSuchStackException.java
new file mode 100644
index 0000000..c4504ff
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/stack/NoSuchStackException.java
@@ -0,0 +1,28 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.stack;
+
+/**
+ * Indicates that the requested Stack doesn't exist.
+ */
+public class NoSuchStackException extends Exception {
+  public NoSuchStackException(String stackName, String stackVersion) {
+    super(String.format("The requested stack doesn't exist. Name='%s' Version='%s'", stackName, stackVersion));
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/c9f0dd0b/ambari-server/src/main/java/org/apache/ambari/server/state/Cluster.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/Cluster.java b/ambari-server/src/main/java/org/apache/ambari/server/state/Cluster.java
index 19fe2dd..10204ea 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/Cluster.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/Cluster.java
@@ -80,6 +80,12 @@ public interface Cluster {
    */
   List<ServiceComponentHost> getServiceComponentHosts(String hostname);
 
+  /**
+   * Get all hosts associated with this cluster.
+   *
+   * @return collection of hosts that are associated with this cluster
+   */
+  public Collection<Host> getHosts();
 
   /**
    * Get all of the hosts running the provided service and component.

http://git-wip-us.apache.org/repos/asf/ambari/blob/c9f0dd0b/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java
index 39219a3..3764dd1 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java
@@ -2500,6 +2500,25 @@ public class ClusterImpl implements Cluster {
     return components.get(componentName).getServiceComponentHosts().keySet();
   }
 
+  @Override
+  public Collection<Host> getHosts() {
+    //todo: really, this class doesn't have a getName() method???
+    String clusterName = clusterEntity.getClusterName();
+
+    Map<String, Host> hosts;
+
+    try {
+      //todo: why the hell does this method throw AmbariException???
+      //todo: this is ridiculous that I need to get hosts for this cluster from Clusters!!!
+      //todo: should I getHosts using the same logic as the other getHosts call?  At least that doesn't throw AmbariException.
+      hosts =  clusters.getHostsForCluster(clusterName);
+    } catch (AmbariException e) {
+      //todo: in what conditions is AmbariException thrown?
+      throw new RuntimeException("Unable to get hosts for cluster: " + clusterName, e);
+    }
+    return hosts == null ? Collections.<Host>emptyList() : hosts.values();
+  }
+
   private ClusterHealthReport getClusterHealthReport(
       Map<String, Host> clusterHosts) throws AmbariException {
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/c9f0dd0b/ambari-server/src/main/java/org/apache/ambari/server/state/host/HostImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/host/HostImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/state/host/HostImpl.java
index 50d762e..27f4800 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/host/HostImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/host/HostImpl.java
@@ -27,6 +27,7 @@ import java.util.concurrent.locks.ReadWriteLock;
 import java.util.concurrent.locks.ReentrantReadWriteLock;
 
 import org.apache.ambari.server.AmbariException;
+import org.apache.ambari.server.HostNotFoundException;
 import org.apache.ambari.server.agent.AgentEnv;
 import org.apache.ambari.server.agent.DiskInfo;
 import org.apache.ambari.server.agent.HostInfo;
@@ -63,6 +64,7 @@ import org.apache.ambari.server.state.fsm.InvalidStateTransitionException;
 import org.apache.ambari.server.state.fsm.SingleArcTransition;
 import org.apache.ambari.server.state.fsm.StateMachine;
 import org.apache.ambari.server.state.fsm.StateMachineFactory;
+import org.apache.ambari.server.topology.TopologyManager;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
@@ -139,6 +141,8 @@ public class HostImpl implements Host {
   @Inject
   private AmbariEventPublisher eventPublisher;
 
+  private static TopologyManager topologyManager;
+
   private static final StateMachineFactory
     <HostImpl, HostState, HostEventType, HostEvent>
       stateMachineFactory
@@ -241,6 +245,8 @@ public class HostImpl implements Host {
     clusterDAO = injector.getInstance(ClusterDAO.class);
     clusters = injector.getInstance(Clusters.class);
     hostConfigMappingDAO = injector.getInstance(HostConfigMappingDAO.class);
+    //todo: proper static injection
+    HostImpl.topologyManager = injector.getInstance(TopologyManager.class);
 
     hostStateEntity = hostEntity.getHostStateEntity();
     if (hostStateEntity == null) {
@@ -281,6 +287,18 @@ public class HostImpl implements Host {
         + ", registrationTime=" + e.registrationTime
         + ", agentVersion=" + agentVersion);
       host.persist();
+      //todo: proper host joined notification
+      boolean associatedWithCluster = false;
+      try {
+        associatedWithCluster = host.clusters.getClustersForHost(host.getPublicHostName()).size() > 0;
+      } catch (HostNotFoundException e1) {
+        associatedWithCluster = false;
+      } catch (AmbariException e1) {
+        // only HostNotFoundException is thrown
+        e1.printStackTrace();
+      }
+
+      topologyManager.onHostRegistered(host, associatedWithCluster);
     }
   }
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/c9f0dd0b/ambari-server/src/main/java/org/apache/ambari/server/topology/Blueprint.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/topology/Blueprint.java b/ambari-server/src/main/java/org/apache/ambari/server/topology/Blueprint.java
new file mode 100644
index 0000000..fa65022
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/topology/Blueprint.java
@@ -0,0 +1,126 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.topology;
+
+import org.apache.ambari.server.controller.internal.Stack;
+import org.apache.ambari.server.orm.entities.BlueprintEntity;
+
+import java.util.Collection;
+import java.util.Map;
+
+/**
+ * Blueprint representation.
+ */
+public interface Blueprint {
+
+  /**
+   * Get the name of the blueprint.
+   *
+   * @return blueprint name
+   */
+  public String getName();
+
+  /**
+   * Get the host groups contained in the blueprint.
+   * @return map of host group name to host group
+   */
+  public Map<String, HostGroup> getHostGroups();
+
+  /**
+   * Get a hostgroup specified by name.
+   *
+   * @param name  name of the host group to get
+   *
+   * @return the host group with the given name or null
+   */
+  public HostGroup getHostGroup(String name);
+
+  /**
+   * Get the Blueprint cluster scoped configuration.
+   * The blueprint cluster scoped configuration has the stack
+   * configuration with the config types associated with the blueprint
+   * set as its parent.
+   *
+   * @return blueprint cluster scoped configuration
+   */
+  public Configuration getConfiguration();
+
+  /**
+   * Get all of the services represented in the blueprint.
+   *
+   * @return collection of all represented service names
+   */
+  public Collection<String> getServices();
+
+  /**
+   * Get the components that are included in the blueprint for the specified service.
+   *
+   * @param service  service name
+   *
+   * @return collection of component names for the service.  Will not return null.
+   */
+  public Collection<String> getComponents(String service);
+
+  /**
+   * Get the stack associated with the blueprint.
+   *
+   * @return associated stack
+   */
+  public Stack getStack();
+
+  /**
+   * Get the host groups which contain components for the specified service.
+   *
+   * @param service  service name
+   *
+   * @return collection of host groups containing components for the specified service;
+   *         will not return null
+   */
+  public Collection<HostGroup> getHostGroupsForService(String service);
+
+  /**
+   * Get the host groups which contain the given component.
+   *
+   * @param component  component name
+   *
+   * @return collection of host groups containing the specified component; will not return null
+   */
+  public Collection<HostGroup> getHostGroupsForComponent(String component);
+
+  /**
+   * Validate the blueprint topology.
+   *
+   * @throws InvalidTopologyException if the topology is invalid
+   */
+  public void validateTopology() throws InvalidTopologyException;
+
+  /**
+   * Validate that the blueprint contains all of the required properties.
+   *
+   * @throws InvalidTopologyException if the blueprint doesn't contain all required properties
+   */
+  public void validateRequiredProperties() throws InvalidTopologyException;
+
+  /**
+   * Obtain the blueprint as an entity.
+   *
+   * @return entity representation of the blueprint
+   */
+  public BlueprintEntity toEntity();
+}
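
A brief sketch of how a consumer might walk this interface once an instance is obtained (for example from BlueprintFactory below); the method body is illustrative, not part of the commit:

    // Illustrative traversal of the Blueprint API defined above.
    void describe(Blueprint blueprint) throws InvalidTopologyException {
      blueprint.validateTopology();
      blueprint.validateRequiredProperties();

      for (String service : blueprint.getServices()) {
        Collection<String> components = blueprint.getComponents(service);
        Collection<HostGroup> groups = blueprint.getHostGroupsForService(service);
        // components/groups would drive validation or provisioning logic here
      }
      Configuration clusterScopedConfig = blueprint.getConfiguration();
    }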

http://git-wip-us.apache.org/repos/asf/ambari/blob/c9f0dd0b/ambari-server/src/main/java/org/apache/ambari/server/topology/BlueprintFactory.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/topology/BlueprintFactory.java b/ambari-server/src/main/java/org/apache/ambari/server/topology/BlueprintFactory.java
new file mode 100644
index 0000000..f02db81
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/topology/BlueprintFactory.java
@@ -0,0 +1,199 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.topology;
+
+import com.google.inject.Inject;
+import org.apache.ambari.server.AmbariException;
+import org.apache.ambari.server.StackAccessException;
+import org.apache.ambari.server.controller.AmbariServer;
+import org.apache.ambari.server.controller.internal.Stack;
+import org.apache.ambari.server.controller.utilities.PropertyHelper;
+import org.apache.ambari.server.orm.dao.BlueprintDAO;
+import org.apache.ambari.server.orm.entities.BlueprintEntity;
+import org.apache.ambari.server.stack.NoSuchStackException;
+
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
+
+/**
+ * Create a Blueprint instance.
+ */
+public class BlueprintFactory {
+
+  // Blueprints
+  protected static final String BLUEPRINT_NAME_PROPERTY_ID =
+      PropertyHelper.getPropertyId("Blueprints", "blueprint_name");
+  protected static final String STACK_NAME_PROPERTY_ID =
+      PropertyHelper.getPropertyId("Blueprints", "stack_name");
+  protected static final String STACK_VERSION_PROPERTY_ID =
+      PropertyHelper.getPropertyId("Blueprints", "stack_version");
+
+  // Host Groups
+  protected static final String HOST_GROUP_PROPERTY_ID = "host_groups";
+  protected static final String HOST_GROUP_NAME_PROPERTY_ID = "name";
+  protected static final String HOST_GROUP_CARDINALITY_PROPERTY_ID = "cardinality";
+
+  // Host Group Components
+  protected static final String COMPONENT_PROPERTY_ID ="components";
+  protected static final String COMPONENT_NAME_PROPERTY_ID ="name";
+
+  // Configurations
+  protected static final String CONFIGURATION_PROPERTY_ID = "configurations";
+  protected static final String PROPERTIES_PROPERTY_ID = "properties";
+  protected static final String PROPERTIES_ATTRIBUTES_PROPERTY_ID = "properties_attributes";
+
+  private static BlueprintDAO blueprintDAO;
+  private ConfigurationFactory configFactory = new ConfigurationFactory();
+
+  public Blueprint getBlueprint(String blueprintName) throws NoSuchStackException {
+    BlueprintEntity entity = blueprintDAO.findByName(blueprintName);
+    //todo: just return null?
+    return entity == null ? null : new BlueprintImpl(entity);
+  }
+
+  /**
+   * Convert a map of properties to a blueprint entity.
+   *
+   * @param properties  property map
+   * @return new blueprint entity
+   */
+  @SuppressWarnings("unchecked")
+  public Blueprint createBlueprint(Map<String, Object> properties) throws NoSuchStackException {
+    String name = String.valueOf(properties.get(BLUEPRINT_NAME_PROPERTY_ID));
+    // String.valueOf() will return "null" if value is null
+    if (name.equals("null") || name.isEmpty()) {
+      //todo: should throw a checked exception from here
+      throw new IllegalArgumentException("Blueprint name must be provided");
+    }
+
+    Stack stack = createStack(properties);
+    Collection<HostGroup> hostGroups = processHostGroups(name, stack, properties);
+    Configuration configuration = configFactory.getConfiguration((Collection<Map<String, String>>)
+        properties.get(CONFIGURATION_PROPERTY_ID));
+
+    return new BlueprintImpl(name, hostGroups, stack, configuration);
+  }
+
+  //todo: StackFactory
+  protected Stack createStack(Map<String, Object> properties) throws NoSuchStackException {
+    String stackName = String.valueOf(properties.get(STACK_NAME_PROPERTY_ID));
+    String stackVersion = String.valueOf(properties.get(STACK_VERSION_PROPERTY_ID));
+    try {
+      //todo: don't pass in controller
+      return new Stack(stackName, stackVersion, AmbariServer.getController());
+    } catch (StackAccessException e) {
+      throw new NoSuchStackException(stackName, stackVersion);
+    } catch (AmbariException e) {
+      //todo:
+      throw new RuntimeException("An error occurred parsing the stack information.", e);
+    }
+  }
+
+  //todo: Move logic to HostGroupImpl
+  @SuppressWarnings("unchecked")
+  private Collection<HostGroup> processHostGroups(String bpName, Stack stack, Map<String, Object> properties) {
+    Set<HashMap<String, Object>> hostGroupProps = (HashSet<HashMap<String, Object>>)
+        properties.get(HOST_GROUP_PROPERTY_ID);
+
+    if (hostGroupProps == null || hostGroupProps.isEmpty()) {
+      throw new IllegalArgumentException("At least one host group must be specified in a blueprint");
+    }
+
+    Collection<HostGroup> hostGroups = new ArrayList<HostGroup>();
+    for (HashMap<String, Object> hostGroupProperties : hostGroupProps) {
+      String hostGroupName = (String) hostGroupProperties.get(HOST_GROUP_NAME_PROPERTY_ID);
+      if (hostGroupName == null || hostGroupName.isEmpty()) {
+        throw new IllegalArgumentException("Every host group must include a non-null 'name' property");
+      }
+
+      HashSet<HashMap<String, String>> componentProps = (HashSet<HashMap<String, String>>)
+          hostGroupProperties.get(COMPONENT_PROPERTY_ID);
+
+      Collection<Map<String, String>> configProps = (Collection<Map<String, String>>)
+          hostGroupProperties.get(CONFIGURATION_PROPERTY_ID);
+
+      Collection<String> components = processHostGroupComponents(stack, hostGroupName, componentProps);
+      Configuration configuration = configFactory.getConfiguration(configProps);
+      String cardinality = String.valueOf(hostGroupProperties.get(HOST_GROUP_CARDINALITY_PROPERTY_ID));
+
+      HostGroup group = new HostGroupImpl(hostGroupName, bpName, stack, components, configuration, cardinality);
+
+      hostGroups.add(group);
+    }
+    return hostGroups;
+  }
+
+  private Collection<String> processHostGroupComponents(Stack stack, String groupName, HashSet<HashMap<String, String>>  componentProps) {
+    if (componentProps == null || componentProps.isEmpty()) {
+      throw new IllegalArgumentException("Host group '" + groupName + "' must contain at least one component");
+    }
+
+    Collection<String> stackComponentNames = getAllStackComponents(stack);
+    Collection<String> components = new ArrayList<String>();
+
+    for (HashMap<String, String> componentProperties : componentProps) {
+      String componentName = componentProperties.get(COMPONENT_NAME_PROPERTY_ID);
+      if (componentName == null || componentName.isEmpty()) {
+        throw new IllegalArgumentException("Host group '" + groupName +
+            "' contains a component with no 'name' property");
+      }
+
+      if (! stackComponentNames.contains(componentName)) {
+        throw new IllegalArgumentException("The component '" + componentName + "' in host group '" +
+            groupName + "' is not valid for the specified stack");
+      }
+      components.add(componentName);
+
+    }
+    return components;
+  }
+
+  /**
+   * Obtain all component names for the specified stack.
+   *
+   * @param stack  stack definition
+   * @return collection of component names for the specified stack, including the AMBARI_SERVER pseudo-component
+   */
+  private Collection<String> getAllStackComponents(Stack stack) {
+    Collection<String> allComponents = new HashSet<String>();
+    for (Collection<String> components: stack.getComponents().values()) {
+      allComponents.addAll(components);
+    }
+    // currently the Ambari server is not a recognized stack component
+    allComponents.add("AMBARI_SERVER");
+
+    return allComponents;
+  }
+
+
+  /**
+   * Static initialization.
+   *
+   * @param dao  blueprint data access object
+   */
+  @Inject
+  public static void init(BlueprintDAO dao) {
+    blueprintDAO   = dao;
+  }
+}
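
As a companion to the factory code above, the following stand-alone sketch shows the shape of the property map that processHostGroups and processHostGroupComponents expect. The literal keys ("host_groups", "name", "components", "cardinality") are assumptions standing in for the HOST_GROUP_PROPERTY_ID, HOST_GROUP_NAME_PROPERTY_ID, COMPONENT_PROPERTY_ID and HOST_GROUP_CARDINALITY_PROPERTY_ID constants, which are defined elsewhere in the factory and do not appear in this diff.

    import java.util.HashMap;
    import java.util.HashSet;
    import java.util.Map;

    public class HostGroupRequestSketch {

      public static Map<String, Object> exampleRequest() {
        // a single component entry: a map holding only a "name" key (assumed key string)
        HashMap<String, String> namenode = new HashMap<String, String>();
        namenode.put("name", "NAMENODE");

        HashSet<HashMap<String, String>> components = new HashSet<HashMap<String, String>>();
        components.add(namenode);

        // a single host group entry, matching the casts performed in processHostGroups
        HashMap<String, Object> masterGroup = new HashMap<String, Object>();
        masterGroup.put("name", "master");          // HOST_GROUP_NAME_PROPERTY_ID (assumed key)
        masterGroup.put("components", components);  // COMPONENT_PROPERTY_ID (assumed key)
        masterGroup.put("cardinality", "1");        // HOST_GROUP_CARDINALITY_PROPERTY_ID (assumed key)

        HashSet<HashMap<String, Object>> hostGroups = new HashSet<HashMap<String, Object>>();
        hostGroups.add(masterGroup);

        Map<String, Object> blueprintProps = new HashMap<String, Object>();
        blueprintProps.put("host_groups", hostGroups);  // HOST_GROUP_PROPERTY_ID (assumed key)
        return blueprintProps;
      }

      public static void main(String[] args) {
        System.out.println(exampleRequest());
      }
    }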

http://git-wip-us.apache.org/repos/asf/ambari/blob/c9f0dd0b/ambari-server/src/main/java/org/apache/ambari/server/topology/BlueprintImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/topology/BlueprintImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/topology/BlueprintImpl.java
new file mode 100644
index 0000000..f27d4ab
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/topology/BlueprintImpl.java
@@ -0,0 +1,397 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an
+ * "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.topology;
+
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map;
+
+import com.google.gson.Gson;
+import org.apache.ambari.server.AmbariException;
+import org.apache.ambari.server.StackAccessException;
+import org.apache.ambari.server.controller.AmbariServer;
+import org.apache.ambari.server.controller.internal.Stack;
+import org.apache.ambari.server.orm.entities.BlueprintConfigEntity;
+import org.apache.ambari.server.orm.entities.BlueprintConfiguration;
+import org.apache.ambari.server.orm.entities.BlueprintEntity;
+import org.apache.ambari.server.orm.entities.HostGroupComponentEntity;
+import org.apache.ambari.server.orm.entities.HostGroupConfigEntity;
+import org.apache.ambari.server.orm.entities.HostGroupEntity;
+import org.apache.ambari.server.orm.entities.StackEntity;
+import org.apache.ambari.server.stack.NoSuchStackException;
+
+/**
+ * Blueprint implementation.
+ */
+public class BlueprintImpl implements Blueprint {
+
+  private String name;
+  private Map<String, HostGroup> hostGroups = new HashMap<String, HostGroup>();
+  private Stack stack;
+  private Configuration configuration;
+  private BlueprintValidator validator;
+
+
+  public BlueprintImpl(BlueprintEntity entity) throws NoSuchStackException {
+    this.name = entity.getBlueprintName();
+
+    parseStack(entity.getStack());
+
+    // create config first because it is set as a parent on all host-group configs
+    processConfiguration(entity.getConfigurations());
+    parseBlueprintHostGroups(entity);
+    configuration.setParentConfiguration(stack.getConfiguration(getServices()));
+    validator = new BlueprintValidatorImpl(this);
+  }
+
+  public BlueprintImpl(String name, Collection<HostGroup> groups, Stack stack, Configuration configuration) {
+    this.name = name;
+    this.stack = stack;
+
+    // caller should set host group configs
+    for (HostGroup hostGroup : groups) {
+      hostGroups.put(hostGroup.getName(), hostGroup);
+    }
+    // if the parent isn't set, the stack configuration is set as the parent
+    this.configuration = configuration;
+    if (configuration.getParentConfiguration() == null) {
+      configuration.setParentConfiguration(stack.getConfiguration(getServices()));
+    }
+    validator = new BlueprintValidatorImpl(this);
+  }
+
+  public String getName() {
+    return name;
+  }
+
+  public String getStackName() {
+    return stack.getName();
+  }
+
+  public String getStackVersion() {
+    return stack.getVersion();
+  }
+
+  //todo: safe copy?
+  @Override
+  public Map<String, HostGroup> getHostGroups() {
+    return hostGroups;
+  }
+
+  //todo: safe copy?
+  @Override
+  public HostGroup getHostGroup(String name) {
+    return hostGroups.get(name);
+  }
+
+  @Override
+  public Configuration getConfiguration() {
+    return configuration;
+  }
+
+  /**
+   * Get all services represented in blueprint.
+   *
+   * @return collection of all services provided by the topology
+   */
+  @Override
+  public Collection<String> getServices() {
+    Collection<String> services = new HashSet<String>();
+    for (HostGroup group : getHostGroups().values()) {
+      services.addAll(group.getServices());
+    }
+    return services;
+  }
+
+  @Override
+  public Collection<String> getComponents(String service) {
+    Collection<String> components = new HashSet<String>();
+    for (HostGroup group : getHostGroupsForService(service)) {
+      components.addAll(group.getComponents(service));
+    }
+
+    return components;
+  }
+
+  @Override
+  public Stack getStack() {
+    return stack;
+  }
+
+  /**
+   * Get host groups which contain a component.
+   *
+   * @param component   component name
+   *
+   * @return collection of host groups which contain the specified component
+   */
+  @Override
+  public Collection<HostGroup> getHostGroupsForComponent(String component) {
+    Collection<HostGroup> resultGroups = new HashSet<HostGroup>();
+    for (HostGroup group : hostGroups.values()) {
+      if (group.getComponents().contains(component)) {
+        resultGroups.add(group);
+      }
+    }
+    return resultGroups;
+  }
+
+  /**
+   * Get host groups which contain a component for the given service.
+   *
+   * @param service   service name
+   *
+   * @return collection of host groups which contain a component of the specified service
+   */
+  @Override
+  public Collection<HostGroup> getHostGroupsForService(String service) {
+    Collection<HostGroup> resultGroups = new HashSet<HostGroup>();
+    for (HostGroup group : hostGroups.values()) {
+      if (group.getServices().contains(service)) {
+        resultGroups.add(group);
+      }
+    }
+    return resultGroups;
+  }
+
+  @Override
+  public void validateTopology() throws InvalidTopologyException {
+    validator.validateTopology();
+  }
+
+  public BlueprintEntity toEntity() {
+
+    BlueprintEntity entity = new BlueprintEntity();
+    entity.setBlueprintName(name);
+
+    //todo: not using stackDAO so stackEntity.id is not set
+    //todo: this is now being set in BlueprintDAO
+    StackEntity stackEntity = new StackEntity();
+    stackEntity.setStackName(stack.getName());
+    stackEntity.setStackVersion(stack.getVersion());
+    entity.setStack(stackEntity);
+
+    createHostGroupEntities(entity);
+    createBlueprintConfigEntities(entity);
+
+    return entity;
+  }
+
+  /**
+   * Validate blueprint configuration.
+   *
+   * @throws InvalidTopologyException if the blueprint configuration is invalid
+   */
+  @Override
+  public void validateRequiredProperties() throws InvalidTopologyException {
+    validator.validateRequiredProperties();
+  }
+
+  private void parseStack(StackEntity stackEntity) throws NoSuchStackException {
+    try {
+      //todo: don't pass in controller
+      stack = new Stack(stackEntity.getStackName(), stackEntity.getStackVersion(), AmbariServer.getController());
+    } catch (StackAccessException e) {
+      throw new NoSuchStackException(stackEntity.getStackName(), stackEntity.getStackVersion());
+    } catch (AmbariException e) {
+      //todo:
+      throw new RuntimeException("An error occurred parsing the stack information.", e);
+    }
+  }
+
+  private Map<String, HostGroup> parseBlueprintHostGroups(BlueprintEntity entity) {
+    for (HostGroupEntity hostGroupEntity : entity.getHostGroups()) {
+      HostGroupImpl hostGroup = new HostGroupImpl(hostGroupEntity, getName(), stack);
+      // set the bp configuration as the host group config parent
+      hostGroup.getConfiguration().setParentConfiguration(configuration);
+      hostGroups.put(hostGroupEntity.getName(), hostGroup);
+    }
+    return hostGroups;
+  }
+
+  /**
+   * Process blueprint configurations.  Stack default properties are not applied here; they are
+   * layered in afterwards, when the stack configuration is set as the parent in the constructor.
+   */
+  private void processConfiguration(Collection<BlueprintConfigEntity> configs) {
+    // not setting stack configuration as parent until after host groups are parsed in constructor
+    configuration = new Configuration(parseConfigurations(configs),
+        parseAttributes(configs), null);
+  }
+
+  /**
+   * Obtain configuration as a map of config type to corresponding properties.
+   *
+   * @return map of config type to map of properties
+   */
+  private Map<String, Map<String, String>> parseConfigurations(Collection<BlueprintConfigEntity> configs) {
+
+    Map<String, Map<String, String>> properties = new HashMap<String, Map<String, String>>();
+    Gson gson = new Gson();
+    for (BlueprintConfiguration config : configs) {
+      String type = config.getType();
+      Map<String, String> typeProperties = gson.<Map<String, String>>fromJson(
+          config.getConfigData(), Map.class);
+      properties.put(type, typeProperties);
+    }
+    return properties;
+  }
+
+  /**
+   * Process cluster scoped configuration attributes contained in blueprint.
+   *
+   * @return cluster scoped property attributes contained within the blueprint
+   */
+  //todo: do inline with config processing
+  private Map<String, Map<String, Map<String, String>>> parseAttributes(Collection<BlueprintConfigEntity> configs) {
+    Map<String, Map<String, Map<String, String>>> mapAttributes =
+        new HashMap<String, Map<String, Map<String, String>>>();
+
+    if (configs != null) {
+      Gson gson = new Gson();
+      for (BlueprintConfigEntity config : configs) {
+        Map<String, Map<String, String>> typeAttrs =
+            gson.<Map<String, Map<String, String>>>fromJson(config.getConfigAttributes(), Map.class);
+        if (typeAttrs != null && !typeAttrs.isEmpty()) {
+          mapAttributes.put(config.getType(), typeAttrs);
+        }
+      }
+    }
+    return mapAttributes;
+  }
+
+  /**
+   * Create host group entities and add to the parent blueprint entity.
+   */
+  @SuppressWarnings("unchecked")
+  private void createHostGroupEntities(BlueprintEntity blueprintEntity) {
+    Collection<HostGroupEntity> entities = new ArrayList<HostGroupEntity>();
+    for (HostGroup group : getHostGroups().values()) {
+      HostGroupEntity hostGroupEntity = new HostGroupEntity();
+      entities.add(hostGroupEntity);
+
+      hostGroupEntity.setName(group.getName());
+      hostGroupEntity.setBlueprintEntity(blueprintEntity);
+      hostGroupEntity.setBlueprintName(getName());
+      hostGroupEntity.setCardinality(group.getCardinality());
+
+      createHostGroupConfigEntities(hostGroupEntity, group.getConfiguration());
+
+      createComponentEntities(hostGroupEntity, group.getComponents());
+    }
+    blueprintEntity.setHostGroups(entities);
+  }
+
+  /**
+   * Populate host group configurations.
+   */
+  private void createHostGroupConfigEntities(HostGroupEntity hostGroup, Configuration groupConfiguration) {
+    Gson jsonSerializer = new Gson();
+    Map<String, HostGroupConfigEntity> configEntityMap = new HashMap<String, HostGroupConfigEntity>();
+    for (Map.Entry<String, Map<String, String>> propEntry : groupConfiguration.getProperties().entrySet()) {
+      String type = propEntry.getKey();
+      Map<String, String> properties = propEntry.getValue();
+
+      HostGroupConfigEntity configEntity = new HostGroupConfigEntity();
+      configEntityMap.put(type, configEntity);
+      configEntity.setBlueprintName(getName());
+      configEntity.setHostGroupEntity(hostGroup);
+      configEntity.setHostGroupName(hostGroup.getName());
+      configEntity.setType(type);
+      configEntity.setConfigData(jsonSerializer.toJson(properties));
+    }
+
+    for (Map.Entry<String, Map<String, Map<String, String>>> attributesEntry : groupConfiguration.getAttributes().entrySet()) {
+      String type = attributesEntry.getKey();
+      Map<String, Map<String, String>> attributes = attributesEntry.getValue();
+
+      HostGroupConfigEntity entity = configEntityMap.get(type);
+      if (entity == null) {
+        entity = new HostGroupConfigEntity();
+        configEntityMap.put(type, entity);
+        entity.setBlueprintName(getName());
+        entity.setHostGroupEntity(hostGroup);
+        entity.setHostGroupName(hostGroup.getName());
+        entity.setType(type);
+      }
+      entity.setConfigAttributes(jsonSerializer.toJson(attributes));
+    }
+    hostGroup.setConfigurations(configEntityMap.values());
+  }
+
+  /**
+   * Create component entities and add to parent host group.
+   */
+  @SuppressWarnings("unchecked")
+  private void createComponentEntities(HostGroupEntity group, Collection<String> components) {
+    Collection<HostGroupComponentEntity> componentEntities = new HashSet<HostGroupComponentEntity>();
+    group.setComponents(componentEntities);
+
+    for (String component : components) {
+      HostGroupComponentEntity componentEntity = new HostGroupComponentEntity();
+      componentEntities.add(componentEntity);
+
+      componentEntity.setName(component);
+      componentEntity.setBlueprintName(group.getBlueprintName());
+      componentEntity.setHostGroupEntity(group);
+      componentEntity.setHostGroupName(group.getName());
+    }
+    group.setComponents(componentEntities);
+  }
+
+  /**
+   * Create blueprint configuration entities and add them to the parent blueprint entity.
+   */
+  private void createBlueprintConfigEntities(BlueprintEntity blueprintEntity) {
+    Gson jsonSerializer = new Gson();
+    Configuration config = getConfiguration();
+    Map<String, BlueprintConfigEntity> configEntityMap = new HashMap<String, BlueprintConfigEntity>();
+    for (Map.Entry<String, Map<String, String>> propEntry : config.getProperties().entrySet()) {
+      String type = propEntry.getKey();
+      Map<String, String> properties = propEntry.getValue();
+
+      BlueprintConfigEntity configEntity = new BlueprintConfigEntity();
+      configEntityMap.put(type, configEntity);
+      configEntity.setBlueprintName(getName());
+      configEntity.setBlueprintEntity(blueprintEntity);
+      configEntity.setType(type);
+      configEntity.setConfigData(jsonSerializer.toJson(properties));
+    }
+
+    for (Map.Entry<String, Map<String, Map<String, String>>> attributesEntry : config.getAttributes().entrySet()) {
+      String type = attributesEntry.getKey();
+      Map<String, Map<String, String>> attributes = attributesEntry.getValue();
+
+      BlueprintConfigEntity entity = configEntityMap.get(type);
+      if (entity == null) {
+        entity = new BlueprintConfigEntity();
+        configEntityMap.put(type, entity);
+        entity.setBlueprintName(getName());
+        entity.setBlueprintEntity(blueprintEntity);
+        entity.setType(type);
+      }
+      entity.setConfigAttributes(jsonSerializer.toJson(attributes));
+    }
+    blueprintEntity.setConfigurations(configEntityMap.values());
+  }
+
+}
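
A note on the persistence format used by BlueprintImpl above: each config type's properties and attributes are written to the entities as JSON strings (createBlueprintConfigEntities / createHostGroupConfigEntities) and read back with Gson in parseConfigurations / parseAttributes. The stand-alone round trip below is a sketch of that encoding only; the class and property names are illustrative, and just the Gson calls mirror the code in this diff.

    import java.util.HashMap;
    import java.util.Map;

    import com.google.gson.Gson;

    public class ConfigJsonRoundTrip {

      public static void main(String[] args) {
        Gson gson = new Gson();

        // properties for one config type, e.g. Configuration.getProperties().get("core-site")
        Map<String, String> coreSite = new HashMap<String, String>();
        coreSite.put("fs.defaultFS", "hdfs://namenode.example.com:8020");

        // serialized the same way createBlueprintConfigEntities passes data to setConfigData(...)
        String configData = gson.toJson(coreSite);

        // parsed back the same way parseConfigurations does
        Map<String, String> parsed = gson.<Map<String, String>>fromJson(configData, Map.class);

        System.out.println(configData);                 // {"fs.defaultFS":"hdfs://namenode.example.com:8020"}
        System.out.println(parsed.get("fs.defaultFS")); // hdfs://namenode.example.com:8020
      }
    }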

http://git-wip-us.apache.org/repos/asf/ambari/blob/c9f0dd0b/ambari-server/src/main/java/org/apache/ambari/server/topology/BlueprintValidator.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/topology/BlueprintValidator.java b/ambari-server/src/main/java/org/apache/ambari/server/topology/BlueprintValidator.java
new file mode 100644
index 0000000..206d161
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/topology/BlueprintValidator.java
@@ -0,0 +1,41 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an
+ * "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.topology;
+
+
+/**
+ * Provides blueprint validation.
+ */
+public interface BlueprintValidator {
+  /**
+   * Validate blueprint topology.
+   *
+   * @throws InvalidTopologyException if the topology is invalid
+   */
+  void validateTopology() throws InvalidTopologyException;
+
+  /**
+   * Validate that required properties are provided.
+   * This doesn't include password properties.
+   *
+   * @throws InvalidTopologyException if required properties are not set in blueprint
+   */
+  void validateRequiredProperties() throws InvalidTopologyException;
+}
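
To illustrate the contract above, here is a minimal, permissive implementation sketch. The real validator wired up by this patch is BlueprintValidatorImpl (constructed in BlueprintImpl), which is not shown in this excerpt; the class below only demonstrates the interface.

    package org.apache.ambari.server.topology;

    /**
     * Example only: accepts any blueprint.  A real validator, such as
     * BlueprintValidatorImpl, enforces topology and required-property rules.
     */
    public class PermissiveBlueprintValidator implements BlueprintValidator {

      @Override
      public void validateTopology() throws InvalidTopologyException {
        // no-op: a real implementation would check component cardinality against the stack
      }

      @Override
      public void validateRequiredProperties() throws InvalidTopologyException {
        // no-op: a real implementation would verify that required, non-password properties are set
      }
    }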