Posted to commits@ambari.apache.org by jo...@apache.org on 2016/12/08 01:25:43 UTC

[3/4] ambari git commit: Merge branch 'branch-feature-AMBARI-18456' into trunk

http://git-wip-us.apache.org/repos/asf/ambari/blob/704170e4/ambari-server/src/main/java/org/apache/ambari/server/state/configgroup/ConfigGroupImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/configgroup/ConfigGroupImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/state/configgroup/ConfigGroupImpl.java
index 9917720..fe1f338 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/configgroup/ConfigGroupImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/configgroup/ConfigGroupImpl.java
@@ -17,18 +17,22 @@
  */
 package org.apache.ambari.server.state.configgroup;
 
+import java.util.Collection;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Map;
+import java.util.Map.Entry;
 import java.util.Set;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ConcurrentMap;
 import java.util.concurrent.locks.ReadWriteLock;
-import java.util.concurrent.locks.ReentrantReadWriteLock;
 
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.DuplicateResourceException;
 import org.apache.ambari.server.controller.ConfigGroupResponse;
 import org.apache.ambari.server.controller.internal.ConfigurationResourceProvider;
+import org.apache.ambari.server.logging.LockFactory;
 import org.apache.ambari.server.orm.dao.ClusterDAO;
 import org.apache.ambari.server.orm.dao.ConfigGroupConfigMappingDAO;
 import org.apache.ambari.server.orm.dao.ConfigGroupDAO;
@@ -44,213 +48,195 @@ import org.apache.ambari.server.orm.entities.HostEntity;
 import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.Clusters;
 import org.apache.ambari.server.state.Config;
+import org.apache.ambari.server.state.ConfigFactory;
 import org.apache.ambari.server.state.Host;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.google.gson.Gson;
-import com.google.inject.Inject;
-import com.google.inject.Injector;
 import com.google.inject.assistedinject.Assisted;
 import com.google.inject.assistedinject.AssistedInject;
 import com.google.inject.persist.Transactional;
 
 public class ConfigGroupImpl implements ConfigGroup {
   private static final Logger LOG = LoggerFactory.getLogger(ConfigGroupImpl.class);
-  private final ReadWriteLock readWriteLock = new ReentrantReadWriteLock();
 
   private Cluster cluster;
-  private ConfigGroupEntity configGroupEntity;
-  private Map<Long, Host> hosts;
-  private Map<String, Config> configurations;
-  private volatile boolean isPersisted = false;
-
-  @Inject
-  private Gson gson;
-  @Inject
-  private ConfigGroupDAO configGroupDAO;
-  @Inject
-  private ConfigGroupConfigMappingDAO configGroupConfigMappingDAO;
-  @Inject
-  private ConfigGroupHostMappingDAO configGroupHostMappingDAO;
-  @Inject
-  private HostDAO hostDAO;
-  @Inject
-  private ClusterDAO clusterDAO;
-  @Inject
-  Clusters clusters;
+  private ConcurrentMap<Long, Host> m_hosts;
+  private ConcurrentMap<String, Config> m_configurations;
+  private String configGroupName;
+  private long configGroupId;
+
+  /**
+   * This lock is required to prevent inconsistencies in internal state between
+   * {@link #m_hosts} and the entities stored by the {@link ConfigGroupEntity}.
+   */
+  private final ReadWriteLock hostLock;
+
+  /**
+   * A label for {@link #hostLock} to use with the {@link LockFactory}.
+   */
+  private static final String hostLockLabel = "configurationGroupHostLock";
+
+  private final ConfigGroupDAO configGroupDAO;
+
+  private final ConfigGroupConfigMappingDAO configGroupConfigMappingDAO;
+
+  private final ConfigGroupHostMappingDAO configGroupHostMappingDAO;
+
+  private final HostDAO hostDAO;
+
+  private final ClusterDAO clusterDAO;
+
+  private final ConfigFactory configFactory;
 
   @AssistedInject
-  public ConfigGroupImpl(@Assisted("cluster") Cluster cluster,
-                         @Assisted("name") String name,
-                         @Assisted("tag") String tag,
-                         @Assisted("description") String description,
-                         @Assisted("configs") Map<String, Config> configs,
-                         @Assisted("hosts") Map<Long, Host> hosts,
-                         Injector injector) {
-    injector.injectMembers(this);
+  public ConfigGroupImpl(@Assisted("cluster") Cluster cluster, @Assisted("name") String name,
+      @Assisted("tag") String tag, @Assisted("description") String description,
+      @Assisted("configs") Map<String, Config> configurations,
+      @Assisted("hosts") Map<Long, Host> hosts, Clusters clusters, ConfigFactory configFactory,
+      ClusterDAO clusterDAO, HostDAO hostDAO, ConfigGroupDAO configGroupDAO,
+      ConfigGroupConfigMappingDAO configGroupConfigMappingDAO,
+      ConfigGroupHostMappingDAO configGroupHostMappingDAO, LockFactory lockFactory) {
+
+    this.configFactory = configFactory;
+    this.clusterDAO = clusterDAO;
+    this.hostDAO = hostDAO;
+    this.configGroupDAO = configGroupDAO;
+    this.configGroupConfigMappingDAO = configGroupConfigMappingDAO;
+    this.configGroupHostMappingDAO = configGroupHostMappingDAO;
+
+    hostLock = lockFactory.newReadWriteLock(hostLockLabel);
+
     this.cluster = cluster;
+    configGroupName = name;
 
-    configGroupEntity = new ConfigGroupEntity();
+    ConfigGroupEntity configGroupEntity = new ConfigGroupEntity();
     configGroupEntity.setClusterId(cluster.getClusterId());
     configGroupEntity.setGroupName(name);
     configGroupEntity.setTag(tag);
     configGroupEntity.setDescription(description);
 
-    if (hosts != null) {
-      this.hosts = hosts;
-    } else {
-      this.hosts = new HashMap<Long, Host>();
-    }
+    m_hosts = hosts == null ? new ConcurrentHashMap<Long, Host>()
+        : new ConcurrentHashMap<>(hosts);
 
-    if (configs != null) {
-      configurations = configs;
-    } else {
-      configurations = new HashMap<String, Config>();
-    }
+    m_configurations = configurations == null ? new ConcurrentHashMap<String, Config>()
+        : new ConcurrentHashMap<>(configurations);
+
+    // save the entity and grab the ID
+    persist(configGroupEntity);
+    configGroupId = configGroupEntity.getGroupId();
   }
 
   @AssistedInject
-  public ConfigGroupImpl(@Assisted Cluster cluster,
-                         @Assisted ConfigGroupEntity configGroupEntity,
-                         Injector injector) {
-    injector.injectMembers(this);
+  public ConfigGroupImpl(@Assisted Cluster cluster, @Assisted ConfigGroupEntity configGroupEntity,
+      Clusters clusters, ConfigFactory configFactory,
+      ClusterDAO clusterDAO, HostDAO hostDAO, ConfigGroupDAO configGroupDAO,
+      ConfigGroupConfigMappingDAO configGroupConfigMappingDAO,
+      ConfigGroupHostMappingDAO configGroupHostMappingDAO, LockFactory lockFactory) {
+
+    this.configFactory = configFactory;
+    this.clusterDAO = clusterDAO;
+    this.hostDAO = hostDAO;
+    this.configGroupDAO = configGroupDAO;
+    this.configGroupConfigMappingDAO = configGroupConfigMappingDAO;
+    this.configGroupHostMappingDAO = configGroupHostMappingDAO;
+
+    hostLock = lockFactory.newReadWriteLock(hostLockLabel);
+
     this.cluster = cluster;
+    configGroupId = configGroupEntity.getGroupId();
+    configGroupName = configGroupEntity.getGroupName();
 
-    this.configGroupEntity = configGroupEntity;
-    configurations = new HashMap<String, Config>();
-    hosts = new HashMap<Long, Host>();
+    m_configurations = new ConcurrentHashMap<String, Config>();
+    m_hosts = new ConcurrentHashMap<Long, Host>();
 
     // Populate configs
-    for (ConfigGroupConfigMappingEntity configMappingEntity : configGroupEntity
-      .getConfigGroupConfigMappingEntities()) {
-
+    for (ConfigGroupConfigMappingEntity configMappingEntity : configGroupEntity.getConfigGroupConfigMappingEntities()) {
       Config config = cluster.getConfig(configMappingEntity.getConfigType(),
         configMappingEntity.getVersionTag());
 
       if (config != null) {
-        configurations.put(config.getType(), config);
+        m_configurations.put(config.getType(), config);
       } else {
-        LOG.warn("Unable to find config mapping for config group"
-          + ", clusterName = " + cluster.getClusterName()
-          + ", type = " + configMappingEntity.getConfigType()
-          + ", tag = " + configMappingEntity.getVersionTag());
+        LOG.warn("Unable to find config mapping {}/{} for config group in cluster {}",
+            configMappingEntity.getConfigType(), configMappingEntity.getVersionTag(),
+            cluster.getClusterName());
       }
     }
 
     // Populate Hosts
-    for (ConfigGroupHostMappingEntity hostMappingEntity : configGroupEntity
-      .getConfigGroupHostMappingEntities()) {
-
+    for (ConfigGroupHostMappingEntity hostMappingEntity : configGroupEntity.getConfigGroupHostMappingEntities()) {
       try {
         Host host = clusters.getHost(hostMappingEntity.getHostname());
         HostEntity hostEntity = hostMappingEntity.getHostEntity();
         if (host != null && hostEntity != null) {
-          hosts.put(hostEntity.getHostId(), host);
+          m_hosts.put(hostEntity.getHostId(), host);
         }
       } catch (AmbariException e) {
-        String msg = "Host seems to be deleted but Config group mapping still " +
-          "exists !";
-        LOG.warn(msg);
-        LOG.debug(msg, e);
+        LOG.warn("Host seems to be deleted but Config group mapping still exists !");
+        LOG.debug("Host seems to be deleted but Config group mapping still exists !", e);
       }
     }
-
-    isPersisted = true;
   }
 
   @Override
   public Long getId() {
-    return configGroupEntity.getGroupId();
+    return configGroupId;
   }
 
   @Override
   public String getName() {
-    readWriteLock.readLock().lock();
-    try {
-      return configGroupEntity.getGroupName();
-    } finally {
-      readWriteLock.readLock().unlock();
-    }
+    return configGroupName;
   }
 
   @Override
   public void setName(String name) {
-    readWriteLock.writeLock().lock();
-    try {
-      configGroupEntity.setGroupName(name);
-    } finally {
-      readWriteLock.writeLock().unlock();
-    }
+    ConfigGroupEntity configGroupEntity = getConfigGroupEntity();
+    configGroupEntity.setGroupName(name);
+    configGroupDAO.merge(configGroupEntity);
 
+    configGroupName = name;
   }
 
   @Override
   public String getClusterName() {
-    return configGroupEntity.getClusterEntity().getClusterName();
+    return cluster.getClusterName();
   }
 
   @Override
   public String getTag() {
-    readWriteLock.readLock().lock();
-    try {
-      return configGroupEntity.getTag();
-    } finally {
-      readWriteLock.readLock().unlock();
-    }
+    ConfigGroupEntity configGroupEntity = getConfigGroupEntity();
+    return configGroupEntity.getTag();
   }
 
   @Override
   public void setTag(String tag) {
-    readWriteLock.writeLock().lock();
-    try {
-      configGroupEntity.setTag(tag);
-    } finally {
-      readWriteLock.writeLock().unlock();
-    }
-
+    ConfigGroupEntity configGroupEntity = getConfigGroupEntity();
+    configGroupEntity.setTag(tag);
+    configGroupDAO.merge(configGroupEntity);
   }
 
   @Override
   public String getDescription() {
-    readWriteLock.readLock().lock();
-    try {
-      return configGroupEntity.getDescription();
-    } finally {
-      readWriteLock.readLock().unlock();
-    }
+    ConfigGroupEntity configGroupEntity = getConfigGroupEntity();
+    return configGroupEntity.getDescription();
   }
 
   @Override
   public void setDescription(String description) {
-    readWriteLock.writeLock().lock();
-    try {
-      configGroupEntity.setDescription(description);
-    } finally {
-      readWriteLock.writeLock().unlock();
-    }
-
+    ConfigGroupEntity configGroupEntity = getConfigGroupEntity();
+    configGroupEntity.setDescription(description);
+    configGroupDAO.merge(configGroupEntity);
   }
 
   @Override
   public Map<Long, Host> getHosts() {
-    readWriteLock.readLock().lock();
-    try {
-      return Collections.unmodifiableMap(hosts);
-    } finally {
-      readWriteLock.readLock().unlock();
-    }
+    return Collections.unmodifiableMap(m_hosts);
   }
 
   @Override
   public Map<String, Config> getConfigurations() {
-    readWriteLock.readLock().lock();
-    try {
-      return Collections.unmodifiableMap(configurations);
-    } finally {
-      readWriteLock.readLock().unlock();
-    }
-
+    return Collections.unmodifiableMap(m_configurations);
   }
 
   /**
@@ -259,13 +245,14 @@ public class ConfigGroupImpl implements ConfigGroup {
    */
   @Override
   public void setHosts(Map<Long, Host> hosts) {
-    readWriteLock.writeLock().lock();
+    hostLock.writeLock().lock();
     try {
-      this.hosts = hosts;
+      // persist entities in a transaction first, then update internal state
+      replaceHostMappings(hosts);
+      m_hosts = new ConcurrentHashMap<>(hosts);
     } finally {
-      readWriteLock.writeLock().unlock();
+      hostLock.writeLock().unlock();
     }
-
   }
 
   /**
@@ -273,115 +260,140 @@ public class ConfigGroupImpl implements ConfigGroup {
    * @param configs
    */
   @Override
-  public void setConfigurations(Map<String, Config> configs) {
-    readWriteLock.writeLock().lock();
-    try {
-      configurations = configs;
-    } finally {
-      readWriteLock.writeLock().unlock();
-    }
-
+  public void setConfigurations(Map<String, Config> configurations) {
+    ConfigGroupEntity configGroupEntity = getConfigGroupEntity();
+    ClusterEntity clusterEntity = configGroupEntity.getClusterEntity();
+
+    // only update the internal state after the configurations have been
+    // persisted
+    persistConfigMapping(clusterEntity, configGroupEntity, configurations);
+    m_configurations = new ConcurrentHashMap<>(configurations);
   }
 
   @Override
-  @Transactional
   public void removeHost(Long hostId) throws AmbariException {
-    readWriteLock.writeLock().lock();
+    hostLock.writeLock().lock();
     try {
-      if (hosts.containsKey(hostId)) {
-        String hostName = hosts.get(hostId).getHostName();
-        LOG.info("Removing host from config group, hostid = " + hostId + ", hostname = " + hostName);
-        hosts.remove(hostId);
-        try {
-          ConfigGroupHostMappingEntityPK hostMappingEntityPK = new
-            ConfigGroupHostMappingEntityPK();
-          hostMappingEntityPK.setHostId(hostId);
-          hostMappingEntityPK.setConfigGroupId(configGroupEntity.getGroupId());
-          configGroupHostMappingDAO.removeByPK(hostMappingEntityPK);
-        } catch (Exception e) {
-          LOG.error("Failed to delete config group host mapping"
-            + ", clusterName = " + getClusterName()
-            + ", id = " + getId()
-            + ", hostid = " + hostId
-            + ", hostname = " + hostName, e);
-          throw new AmbariException(e.getMessage());
-        }
+      Host host = m_hosts.get(hostId);
+      if (null == host) {
+        return;
       }
-    } finally {
-      readWriteLock.writeLock().unlock();
-    }
-  }
 
-  @Override
-  public void persist() {
-    readWriteLock.writeLock().lock();
-    try {
-      if (!isPersisted) {
-        persistEntities();
-        refresh();
-        cluster.refresh();
-        isPersisted = true;
-      } else {
-        saveIfPersisted();
+      String hostName = host.getHostName();
+      LOG.info("Removing host (id={}, name={}) from config group", host.getHostId(), hostName);
+
+      try {
+        // remove the entities first, then update internal state
+        removeConfigGroupHostEntity(host);
+        m_hosts.remove(hostId);
+      } catch (Exception e) {
+        LOG.error("Failed to delete config group host mapping for cluster {} and host {}",
+            cluster.getClusterName(), hostName, e);
+
+        throw new AmbariException(e.getMessage());
       }
     } finally {
-      readWriteLock.writeLock().unlock();
+      hostLock.writeLock().unlock();
     }
   }
 
   /**
+   * Removes the {@link ConfigGroupHostMappingEntity} for the specified host
+   * from this configuration group.
+   *
+   * @param host
+   *          the host to remove.
+   */
+  @Transactional
+  void removeConfigGroupHostEntity(Host host) {
+    ConfigGroupEntity configGroupEntity = getConfigGroupEntity();
+    ConfigGroupHostMappingEntityPK hostMappingEntityPK = new ConfigGroupHostMappingEntityPK();
+    hostMappingEntityPK.setHostId(host.getHostId());
+    hostMappingEntityPK.setConfigGroupId(configGroupId);
+
+    ConfigGroupHostMappingEntity configGroupHostMapping = configGroupHostMappingDAO.findByPK(
+        hostMappingEntityPK);
+
+    configGroupHostMappingDAO.remove(configGroupHostMapping);
+
+    configGroupEntity.getConfigGroupHostMappingEntities().remove(configGroupHostMapping);
+    configGroupEntity = configGroupDAO.merge(getConfigGroupEntity());
+  }
+
+  /**
+   * @param configGroupEntity
+   */
+  private void persist(ConfigGroupEntity configGroupEntity) {
+    persistEntities(configGroupEntity);
+    cluster.refresh();
+  }
+
+  /**
    * Persist Config group with host mapping and configurations
    *
    * @throws Exception
    */
   @Transactional
-  void persistEntities() {
+  void persistEntities(ConfigGroupEntity configGroupEntity) {
     ClusterEntity clusterEntity = clusterDAO.findById(cluster.getClusterId());
     configGroupEntity.setClusterEntity(clusterEntity);
     configGroupEntity.setTimestamp(System.currentTimeMillis());
     configGroupDAO.create(configGroupEntity);
 
-    persistConfigMapping(clusterEntity);
-    persistHostMapping();
-  }
+    configGroupId = configGroupEntity.getGroupId();
 
-  // TODO: Test rollback scenario
+    persistConfigMapping(clusterEntity, configGroupEntity, m_configurations);
+    replaceHostMappings(m_hosts);
+  }
 
   /**
-   * Persist host mapping
+   * Replaces all existing host mappings with the new collection of hosts.
    *
+   * @param hosts
+   *          the new hosts
    * @throws Exception
    */
-  @Override
   @Transactional
-  public void persistHostMapping() {
-    if (isPersisted) {
-      // Delete existing mappings and create new ones
-      configGroupHostMappingDAO.removeAllByGroup(configGroupEntity.getGroupId());
-      configGroupEntity.setConfigGroupHostMappingEntities(new HashSet<ConfigGroupHostMappingEntity>());
-    }
+  void replaceHostMappings(Map<Long, Host> hosts) {
+    ConfigGroupEntity configGroupEntity = getConfigGroupEntity();
+
+    // Delete existing mappings and create new ones
+    configGroupHostMappingDAO.removeAllByGroup(configGroupEntity.getGroupId());
+    configGroupEntity.setConfigGroupHostMappingEntities(
+        new HashSet<ConfigGroupHostMappingEntity>());
 
     if (hosts != null && !hosts.isEmpty()) {
-      for (Host host : hosts.values()) {
-        HostEntity hostEntity = hostDAO.findById(host.getHostId());
-        if (hostEntity != null) {
-          ConfigGroupHostMappingEntity hostMappingEntity = new
-            ConfigGroupHostMappingEntity();
-          hostMappingEntity.setHostId(hostEntity.getHostId());
-          hostMappingEntity.setHostEntity(hostEntity);
-          hostMappingEntity.setConfigGroupEntity(configGroupEntity);
-          hostMappingEntity.setConfigGroupId(configGroupEntity.getGroupId());
-          configGroupEntity.getConfigGroupHostMappingEntities().add
-                  (hostMappingEntity);
-          configGroupHostMappingDAO.create(hostMappingEntity);
-        } else {
-          LOG.warn("Host seems to be deleted, cannot create host to config " +
-            "group mapping, host = " + host.getHostName());
-        }
+      configGroupEntity = persistHostMapping(hosts.values(), configGroupEntity);
+    }
+  }
+
+  /**
+   * Adds the collection of hosts to the configuration group.
+   *
+   * @param hosts
+   * @param configGroupEntity
+   */
+  @Transactional
+  ConfigGroupEntity persistHostMapping(Collection<Host> hosts,
+      ConfigGroupEntity configGroupEntity) {
+    for (Host host : hosts) {
+      HostEntity hostEntity = hostDAO.findById(host.getHostId());
+      if (hostEntity != null) {
+        ConfigGroupHostMappingEntity hostMappingEntity = new ConfigGroupHostMappingEntity();
+        hostMappingEntity.setHostId(hostEntity.getHostId());
+        hostMappingEntity.setHostEntity(hostEntity);
+        hostMappingEntity.setConfigGroupEntity(configGroupEntity);
+        hostMappingEntity.setConfigGroupId(configGroupEntity.getGroupId());
+        configGroupEntity.getConfigGroupHostMappingEntities().add(hostMappingEntity);
+        configGroupHostMappingDAO.create(hostMappingEntity);
+      } else {
+        LOG.warn(
+            "The host {} has been removed from the cluster and cannot be added to the configuration group {}",
+            host.getHostName(), configGroupName);
       }
     }
-    // TODO: Make sure this does not throw Nullpointer based on JPA docs
-    configGroupEntity = configGroupDAO.merge(configGroupEntity);
+
+    return configGroupDAO.merge(configGroupEntity);
   }
 
   /**
@@ -391,42 +403,31 @@ public class ConfigGroupImpl implements ConfigGroup {
    * @throws Exception
    */
   @Transactional
-  void persistConfigMapping(ClusterEntity clusterEntity) {
-    if (isPersisted) {
-      configGroupConfigMappingDAO.removeAllByGroup(configGroupEntity.getGroupId());
-      configGroupEntity.setConfigGroupConfigMappingEntities(new HashSet<ConfigGroupConfigMappingEntity>());
-    }
+  void persistConfigMapping(ClusterEntity clusterEntity,
+      ConfigGroupEntity configGroupEntity, Map<String, Config> configurations) {
+    configGroupConfigMappingDAO.removeAllByGroup(configGroupEntity.getGroupId());
+    configGroupEntity.setConfigGroupConfigMappingEntities(
+        new HashSet<ConfigGroupConfigMappingEntity>());
 
     if (configurations != null && !configurations.isEmpty()) {
-      for (Config config : configurations.values()) {
+      for (Entry<String, Config> entry : configurations.entrySet()) {
+        Config config = entry.getValue();
         ClusterConfigEntity clusterConfigEntity = clusterDAO.findConfig
           (cluster.getClusterId(), config.getType(), config.getTag());
 
         if (clusterConfigEntity == null) {
-          config.setVersion(cluster.getNextConfigVersion(config.getType()));
-          config.setStackId(cluster.getDesiredStackVersion());
-          // Create configuration
-          clusterConfigEntity = new ClusterConfigEntity();
-          clusterConfigEntity.setClusterId(clusterEntity.getClusterId());
-          clusterConfigEntity.setClusterEntity(clusterEntity);
-          clusterConfigEntity.setStack(clusterEntity.getDesiredStack());
-          clusterConfigEntity.setType(config.getType());
-          clusterConfigEntity.setVersion(config.getVersion());
-          clusterConfigEntity.setTag(config.getTag());
-          clusterConfigEntity.setData(gson.toJson(config.getProperties()));
-          if (null != config.getPropertiesAttributes()) {
-            clusterConfigEntity.setAttributes(gson.toJson(config.getPropertiesAttributes()));
-          }
-          clusterConfigEntity.setTimestamp(System.currentTimeMillis());
-          clusterDAO.createConfig(clusterConfigEntity);
-          clusterEntity.getClusterConfigEntities().add(clusterConfigEntity);
-          cluster.addConfig(config);
-          clusterDAO.merge(clusterEntity);
-          cluster.refresh();
+          config = configFactory.createNew(cluster, config.getType(), config.getTag(),
+              config.getProperties(), config.getPropertiesAttributes());
+
+          entry.setValue(config);
+
+          clusterConfigEntity = clusterDAO.findConfig(cluster.getClusterId(), config.getType(),
+              config.getTag());
         }
 
         ConfigGroupConfigMappingEntity configMappingEntity =
           new ConfigGroupConfigMappingEntity();
+
         configMappingEntity.setTimestamp(System.currentTimeMillis());
         configMappingEntity.setClusterId(clusterEntity.getClusterId());
         configMappingEntity.setClusterConfigEntity(clusterConfigEntity);
@@ -443,142 +444,84 @@ public class ConfigGroupImpl implements ConfigGroup {
     }
   }
 
-  void saveIfPersisted() {
-    if (isPersisted) {
-      save(clusterDAO.findById(cluster.getClusterId()));
-    }
-  }
-
-  @Transactional
-  void save(ClusterEntity clusterEntity) {
-    persistHostMapping();
-    persistConfigMapping(clusterEntity);
-  }
-
   @Override
+  @Transactional
   public void delete() {
-    readWriteLock.writeLock().lock();
-    try {
-      configGroupConfigMappingDAO.removeAllByGroup(configGroupEntity.getGroupId());
-      configGroupHostMappingDAO.removeAllByGroup(configGroupEntity.getGroupId());
-      configGroupDAO.removeByPK(configGroupEntity.getGroupId());
-      cluster.refresh();
-      isPersisted = false;
-    } finally {
-      readWriteLock.writeLock().unlock();
-    }
+    configGroupConfigMappingDAO.removeAllByGroup(configGroupId);
+    configGroupHostMappingDAO.removeAllByGroup(configGroupId);
+    configGroupDAO.removeByPK(configGroupId);
+    cluster.refresh();
   }
 
   @Override
   public void addHost(Host host) throws AmbariException {
-    readWriteLock.writeLock().lock();
+    hostLock.writeLock().lock();
     try {
-      if (hosts != null && !hosts.isEmpty()) {
-        for (Host h : hosts.values()) {
-          if (h.getHostName().equals(host.getHostName())) {
-            throw new DuplicateResourceException("Host " + h.getHostName() +
-              "is already associated with Config Group " +
-              configGroupEntity.getGroupName());
-          }
-        }
-        HostEntity hostEntity = hostDAO.findByName(host.getHostName());
-        if (hostEntity != null) {
-          hosts.put(hostEntity.getHostId(), host);
-        }
-      }
-    } finally {
-      readWriteLock.writeLock().unlock();
-    }
-  }
+      if (m_hosts.containsKey(host.getHostId())) {
+        String message = String.format(
+            "Host %s is already associated with the configuration group %s", host.getHostName(),
+            configGroupName);
 
-  @Override
-  public void addConfiguration(Config config) throws AmbariException {
-    readWriteLock.writeLock().lock();
-    try {
-      if (configurations != null && !configurations.isEmpty()) {
-        for (Config c : configurations.values()) {
-          if (c.getType().equals(config.getType()) && c.getTag().equals
-            (config.getTag())) {
-            throw new DuplicateResourceException("Config " + config.getType() +
-              " with tag " + config.getTag() + " is already associated " +
-              "with Config Group " + configGroupEntity.getGroupName());
-          }
-        }
-        configurations.put(config.getType(), config);
+        throw new DuplicateResourceException(message);
       }
+
+      // ensure that we only update the in-memory structure if the merge was
+      // successful
+      ConfigGroupEntity configGroupEntity = getConfigGroupEntity();
+      persistHostMapping(Collections.singletonList(host), configGroupEntity);
+      m_hosts.putIfAbsent(host.getHostId(), host);
     } finally {
-      readWriteLock.writeLock().unlock();
+      hostLock.writeLock().unlock();
     }
   }
 
   @Override
   public ConfigGroupResponse convertToResponse() throws AmbariException {
-    readWriteLock.readLock().lock();
-    try {
-      Set<Map<String, Object>> hostnames = new HashSet<Map<String, Object>>();
-      for (Host host : hosts.values()) {
-        Map<String, Object> hostMap = new HashMap<String, Object>();
-        hostMap.put("host_name", host.getHostName());
-        hostnames.add(hostMap);
-      }
-
-      Set<Map<String, Object>> configObjMap = new HashSet<Map<String, Object>>();
+    Set<Map<String, Object>> hostnames = new HashSet<Map<String, Object>>();
+    for (Host host : m_hosts.values()) {
+      Map<String, Object> hostMap = new HashMap<String, Object>();
+      hostMap.put("host_name", host.getHostName());
+      hostnames.add(hostMap);
+    }
 
-      for (Config config : configurations.values()) {
-        Map<String, Object> configMap = new HashMap<String, Object>();
-        configMap.put(ConfigurationResourceProvider.CONFIGURATION_CONFIG_TYPE_PROPERTY_ID,
-            config.getType());
-        configMap.put(ConfigurationResourceProvider.CONFIGURATION_CONFIG_TAG_PROPERTY_ID,
-            config.getTag());
-        configObjMap.add(configMap);
-      }
+    Set<Map<String, Object>> configObjMap = new HashSet<Map<String, Object>>();
 
-      ConfigGroupResponse configGroupResponse = new ConfigGroupResponse(
-          configGroupEntity.getGroupId(), cluster.getClusterName(),
-          configGroupEntity.getGroupName(), configGroupEntity.getTag(),
-          configGroupEntity.getDescription(), hostnames, configObjMap);
-      return configGroupResponse;
-    } finally {
-      readWriteLock.readLock().unlock();
+    for (Config config : m_configurations.values()) {
+      Map<String, Object> configMap = new HashMap<String, Object>();
+      configMap.put(ConfigurationResourceProvider.CONFIGURATION_CONFIG_TYPE_PROPERTY_ID,
+          config.getType());
+      configMap.put(ConfigurationResourceProvider.CONFIGURATION_CONFIG_TAG_PROPERTY_ID,
+          config.getTag());
+      configObjMap.add(configMap);
     }
-  }
 
-  @Override
-  @Transactional
-  public void refresh() {
-    readWriteLock.writeLock().lock();
-    try {
-      if (isPersisted) {
-        ConfigGroupEntity groupEntity = configGroupDAO.findById
-          (configGroupEntity.getGroupId());
-        configGroupDAO.refresh(groupEntity);
-        // TODO What other entities should refresh?
-      }
-    } finally {
-      readWriteLock.writeLock().unlock();
-    }
+    ConfigGroupEntity configGroupEntity = getConfigGroupEntity();
+    ConfigGroupResponse configGroupResponse = new ConfigGroupResponse(
+        configGroupEntity.getGroupId(), cluster.getClusterName(),
+        configGroupEntity.getGroupName(), configGroupEntity.getTag(),
+        configGroupEntity.getDescription(), hostnames, configObjMap);
+    return configGroupResponse;
   }
 
-
   @Override
   public String getServiceName() {
-    readWriteLock.readLock().lock();
-    try {
-      return configGroupEntity.getServiceName();
-    } finally {
-      readWriteLock.readLock().unlock();
-    }
-
+    ConfigGroupEntity configGroupEntity = getConfigGroupEntity();
+    return configGroupEntity.getServiceName();
   }
 
   @Override
   public void setServiceName(String serviceName) {
-    readWriteLock.writeLock().lock();
-    try {
-      configGroupEntity.setServiceName(serviceName);
-    } finally {
-      readWriteLock.writeLock().unlock();
-    }
+    ConfigGroupEntity configGroupEntity = getConfigGroupEntity();
+    configGroupEntity.setServiceName(serviceName);
+    configGroupDAO.merge(configGroupEntity);
+  }
 
+  /**
+   * Gets the {@link ConfigGroupEntity} by its ID from the JPA cache.
+   *
+   * @return the entity.
+   */
+  private ConfigGroupEntity getConfigGroupEntity() {
+    return configGroupDAO.findById(configGroupId);
   }
 }
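
The refactored ConfigGroupImpl above persists its ConfigGroupEntity during construction and receives its collaborators (DAOs, ConfigFactory, LockFactory) through constructor injection instead of field injection, so callers no longer invoke persist() on a new group. A minimal caller-side sketch, assuming the assisted ConfigGroupFactory used by the tests later in this commit and an existing Config (hdfsSite) and Host (host) in scope; the group name, tag, and description are illustrative:

    // Obtain the assisted-inject factory; the exact binding is assumed from the
    // @AssistedInject constructors shown above.
    ConfigGroupFactory configGroupFactory = injector.getInstance(ConfigGroupFactory.class);

    Map<String, Config> configs = Collections.singletonMap(hdfsSite.getType(), hdfsSite);
    Map<Long, Host> hosts = Collections.singletonMap(host.getHostId(), host);

    // The constructor persists the entity and assigns the group ID, so there is
    // no separate persist() step; the group is simply registered with the cluster.
    ConfigGroup group = configGroupFactory.createNew(cluster, "hdfs-tuning", "HDFS",
        "Illustrative HDFS override group", configs, hosts);
    cluster.addConfigGroup(group);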

http://git-wip-us.apache.org/repos/asf/ambari/blob/704170e4/ambari-server/src/main/java/org/apache/ambari/server/topology/AmbariContext.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/topology/AmbariContext.java b/ambari-server/src/main/java/org/apache/ambari/server/topology/AmbariContext.java
index e1f5cd2..5e887d4 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/topology/AmbariContext.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/topology/AmbariContext.java
@@ -67,11 +67,10 @@ import org.apache.ambari.server.security.authorization.AuthorizationException;
 import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.Clusters;
 import org.apache.ambari.server.state.Config;
-import org.apache.ambari.server.state.ConfigImpl;
+import org.apache.ambari.server.state.ConfigFactory;
 import org.apache.ambari.server.state.DesiredConfig;
 import org.apache.ambari.server.state.Host;
 import org.apache.ambari.server.state.SecurityType;
-import org.apache.ambari.server.state.StackId;
 import org.apache.ambari.server.state.configgroup.ConfigGroup;
 import org.apache.ambari.server.utils.RetryHelper;
 import org.slf4j.Logger;
@@ -91,8 +90,13 @@ public class AmbariContext {
   @Inject
   private PersistedState persistedState;
 
+  /**
+   * Used for creating read-only instances of existing {@link Config} in order
+   * to send them to the {@link ConfigGroupResourceProvider} to create
+   * {@link ConfigGroup}s.
+   */
   @Inject
-  private org.apache.ambari.server.configuration.Configuration configs;
+  ConfigFactory configFactory;
 
   private static AmbariManagementController controller;
   private static ClusterController clusterController;
@@ -458,11 +462,13 @@ public class AmbariContext {
         SortedSet<DesiredConfig> desiredConfigsOrderedByVersion = new TreeSet<>(new Comparator<DesiredConfig>() {
           @Override
           public int compare(DesiredConfig o1, DesiredConfig o2) {
-            if (o1.getVersion() < o2.getVersion())
+            if (o1.getVersion() < o2.getVersion()) {
               return -1;
+            }
 
-            if (o1.getVersion() > o2.getVersion())
+            if (o1.getVersion() > o2.getVersion()) {
               return 1;
+            }
 
             return 0;
           }
@@ -473,9 +479,9 @@ public class AmbariContext {
         int tagMatchState = 0; // 0 -> INITIAL -> tagMatchState = 1 -> TOPLOGY_RESOLVED -> tagMatchState = 2
 
         for (DesiredConfig config: desiredConfigsOrderedByVersion) {
-          if (config.getTag().equals(TopologyManager.INITIAL_CONFIG_TAG) && tagMatchState == 0)
+          if (config.getTag().equals(TopologyManager.INITIAL_CONFIG_TAG) && tagMatchState == 0) {
             tagMatchState = 1;
-          else if (config.getTag().equals(TopologyManager.TOPOLOGY_RESOLVED_TAG) && tagMatchState == 1) {
+          } else if (config.getTag().equals(TopologyManager.TOPOLOGY_RESOLVED_TAG) && tagMatchState == 1) {
             tagMatchState = 2;
             break;
           }
@@ -551,7 +557,6 @@ public class AmbariContext {
           addedHost = true;
           if (! group.getHosts().containsKey(host.getHostId())) {
             group.addHost(host);
-            group.persistHostMapping();
           }
 
         } catch (AmbariException e) {
@@ -585,9 +590,7 @@ public class AmbariContext {
     for (Map.Entry<String, Map<String, String>> entry : userProvidedGroupProperties.entrySet()) {
       String type = entry.getKey();
       String service = stack.getServiceForConfigType(type);
-      Config config = new ConfigImpl(type);
-      config.setTag(groupName);
-      config.setProperties(entry.getValue());
+      Config config = configFactory.createReadOnly(type, groupName, entry.getValue(), null);
       //todo: attributes
       Map<String, Config> serviceConfigs = groupConfigs.get(service);
       if (serviceConfigs == null) {
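
In AmbariContext the hand-built ConfigImpl (plus setTag/setProperties) is replaced by ConfigFactory.createReadOnly, which produces a detached configuration suitable for handing to the config group resource provider. A hedged sketch of that call, with groupName as in the surrounding code and the type and property values invented for illustration:

    Map<String, String> properties = new HashMap<>();
    properties.put("dfs.replication", "2"); // illustrative property only

    // createReadOnly(type, tag, properties, propertiesAttributes) as used in the
    // hunk above; null attributes mirror the "//todo: attributes" note.
    Config config = configFactory.createReadOnly("hdfs-site", groupName, properties, null);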

http://git-wip-us.apache.org/repos/asf/ambari/blob/704170e4/ambari-server/src/main/java/org/apache/ambari/server/update/HostUpdateHelper.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/update/HostUpdateHelper.java b/ambari-server/src/main/java/org/apache/ambari/server/update/HostUpdateHelper.java
index 6a8057c..4c1ef5a 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/update/HostUpdateHelper.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/update/HostUpdateHelper.java
@@ -53,8 +53,8 @@ import org.apache.ambari.server.orm.entities.TopologyRequestEntity;
 import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.Clusters;
 import org.apache.ambari.server.state.Config;
+import org.apache.ambari.server.state.ConfigFactory;
 import org.apache.ambari.server.state.ConfigHelper;
-import org.apache.ambari.server.state.ConfigImpl;
 import org.apache.ambari.server.state.Host;
 import org.apache.ambari.server.utils.EventBusSynchronizer;
 import org.apache.commons.lang.StringUtils;
@@ -234,12 +234,12 @@ public class HostUpdateHelper {
           boolean configUpdated;
 
           // going through all cluster configs and update property values
+          ConfigFactory configFactory = injector.getInstance(ConfigFactory.class);
           for (ClusterConfigEntity clusterConfigEntity : clusterConfigEntities) {
-            ConfigImpl config = new ConfigImpl(cluster, clusterConfigEntity, injector);
+            Config config = configFactory.createExisting(cluster, clusterConfigEntity);
             configUpdated = false;
 
             for (Map.Entry<String,String> property : config.getProperties().entrySet()) {
-
               updatedPropertyValue = replaceHosts(property.getValue(), currentHostNames, hostMapping);
 
               if (updatedPropertyValue != null) {
@@ -249,8 +249,9 @@ public class HostUpdateHelper {
                 configUpdated = true;
               }
             }
+
             if (configUpdated) {
-              config.persist(false);
+              config.save();
             }
           }
         }
@@ -317,6 +318,7 @@ public class HostUpdateHelper {
   * */
   public class StringComparator implements Comparator<String> {
 
+    @Override
     public int compare(String s1, String s2) {
       return s2.length() - s1.length();
     }
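
For HostUpdateHelper the change is from instantiating ConfigImpl and calling persist(false) to wrapping the existing ClusterConfigEntity via ConfigFactory.createExisting and calling save(). A minimal sketch of that flow, assuming cluster and clusterConfigEntity as in the hunk above; the property key and value are illustrative, and updateProperties/save are the calls used elsewhere in this commit:

    ConfigFactory configFactory = injector.getInstance(ConfigFactory.class);

    // Wrap the persisted entity rather than constructing ConfigImpl directly.
    Config config = configFactory.createExisting(cluster, clusterConfigEntity);

    // Apply the rewritten host name and persist; save() replaces persist(false).
    config.updateProperties(Collections.singletonMap("fs.defaultFS", "hdfs://new-host:8020"));
    config.save();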

http://git-wip-us.apache.org/repos/asf/ambari/blob/704170e4/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/ExecutionCommandWrapperTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/ExecutionCommandWrapperTest.java b/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/ExecutionCommandWrapperTest.java
index ffca51d..62ce93b 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/ExecutionCommandWrapperTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/ExecutionCommandWrapperTest.java
@@ -36,7 +36,6 @@ import org.apache.ambari.server.orm.GuiceJpaInitializer;
 import org.apache.ambari.server.orm.InMemoryDefaultTestModule;
 import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.Clusters;
-import org.apache.ambari.server.state.Config;
 import org.apache.ambari.server.state.ConfigFactory;
 import org.apache.ambari.server.state.ConfigHelper;
 import org.apache.ambari.server.state.StackId;
@@ -128,24 +127,16 @@ public class ExecutionCommandWrapperTest {
     CONFIG_ATTRIBUTES = new HashMap<String, Map<String,String>>();
 
     //Cluster level global config
-    Config globalConfig = configFactory.createNew(cluster1, GLOBAL_CONFIG, GLOBAL_CLUSTER, CONFIG_ATTRIBUTES);
-    globalConfig.setTag(CLUSTER_VERSION_TAG);
-    cluster1.addConfig(globalConfig);
+    configFactory.createNew(cluster1, GLOBAL_CONFIG, CLUSTER_VERSION_TAG, GLOBAL_CLUSTER, CONFIG_ATTRIBUTES);
 
     //Cluster level service config
-    Config serviceSiteConfigCluster = configFactory.createNew(cluster1, SERVICE_SITE_CONFIG, SERVICE_SITE_CLUSTER, CONFIG_ATTRIBUTES);
-    serviceSiteConfigCluster.setTag(CLUSTER_VERSION_TAG);
-    cluster1.addConfig(serviceSiteConfigCluster);
+    configFactory.createNew(cluster1, SERVICE_SITE_CONFIG, CLUSTER_VERSION_TAG, SERVICE_SITE_CLUSTER, CONFIG_ATTRIBUTES);
 
     //Service level service config
-    Config serviceSiteConfigService = configFactory.createNew(cluster1, SERVICE_SITE_CONFIG, SERVICE_SITE_SERVICE, CONFIG_ATTRIBUTES);
-    serviceSiteConfigService.setTag(SERVICE_VERSION_TAG);
-    cluster1.addConfig(serviceSiteConfigService);
+    configFactory.createNew(cluster1, SERVICE_SITE_CONFIG, SERVICE_VERSION_TAG, SERVICE_SITE_SERVICE, CONFIG_ATTRIBUTES);
 
     //Host level service config
-    Config serviceSiteConfigHost = configFactory.createNew(cluster1, SERVICE_SITE_CONFIG, SERVICE_SITE_HOST, CONFIG_ATTRIBUTES);
-    serviceSiteConfigHost.setTag(HOST_VERSION_TAG);
-    cluster1.addConfig(serviceSiteConfigHost);
+    configFactory.createNew(cluster1, SERVICE_SITE_CONFIG, HOST_VERSION_TAG, SERVICE_SITE_HOST, CONFIG_ATTRIBUTES);
 
     ActionDBAccessor db = injector.getInstance(ActionDBAccessorImpl.class);
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/704170e4/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/TestActionSchedulerThreading.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/TestActionSchedulerThreading.java b/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/TestActionSchedulerThreading.java
index 90a4421..246c8b3 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/TestActionSchedulerThreading.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/TestActionSchedulerThreading.java
@@ -34,8 +34,8 @@ import org.apache.ambari.server.orm.OrmTestHelper;
 import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.Clusters;
 import org.apache.ambari.server.state.Config;
+import org.apache.ambari.server.state.ConfigFactory;
 import org.apache.ambari.server.state.ConfigHelper;
-import org.apache.ambari.server.state.ConfigImpl;
 import org.apache.ambari.server.state.DesiredConfig;
 import org.apache.ambari.server.state.StackId;
 import org.junit.After;
@@ -103,15 +103,11 @@ public class TestActionSchedulerThreading {
     Map<String, String> properties = new HashMap<String, String>();
     Map<String, Map<String, String>> propertiesAttributes = new HashMap<String, Map<String, String>>();
 
+    ConfigFactory configFactory = injector.getInstance(ConfigFactory.class);
+
     // foo-type for v1 on current stack
     properties.put("foo-property-1", "foo-value-1");
-    Config c1 = new ConfigImpl(cluster, "foo-type", properties, propertiesAttributes, injector);
-    c1.setTag("version-1");
-    c1.setStackId(stackId);
-    c1.setVersion(1L);
-
-    cluster.addConfig(c1);
-    c1.persist();
+    Config c1 = configFactory.createNew(cluster, "foo-type", "version-1", properties, propertiesAttributes);
 
     // make v1 "current"
     cluster.addDesiredConfig("admin", Sets.newHashSet(c1), "note-1");
@@ -122,12 +118,7 @@ public class TestActionSchedulerThreading {
     // save v2
     // foo-type for v2 on new stack
     properties.put("foo-property-2", "foo-value-2");
-    Config c2 = new ConfigImpl(cluster, "foo-type", properties, propertiesAttributes, injector);
-    c2.setTag("version-2");
-    c2.setStackId(newStackId);
-    c2.setVersion(2L);
-    cluster.addConfig(c2);
-    c2.persist();
+    Config c2 = configFactory.createNew(cluster, "foo-type", "version-2", properties, propertiesAttributes);
 
     // make v2 "current"
     cluster.addDesiredConfig("admin", Sets.newHashSet(c2), "note-2");
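
The test updates above condense the old multi-step configuration setup into a single factory call: ConfigFactory.createNew now accepts the tag and registers the persisted config, so the setTag/setStackId/setVersion/addConfig/persist sequence disappears. A compact before-and-after using the values from this hunk:

    // Old pattern (removed by this commit):
    //   Config c1 = new ConfigImpl(cluster, "foo-type", properties, propertiesAttributes, injector);
    //   c1.setTag("version-1");
    //   c1.setStackId(stackId);
    //   c1.setVersion(1L);
    //   cluster.addConfig(c1);
    //   c1.persist();

    // New pattern: create, tag, and persist in one call, then mark it desired.
    ConfigFactory configFactory = injector.getInstance(ConfigFactory.class);
    Config c1 = configFactory.createNew(cluster, "foo-type", "version-1", properties, propertiesAttributes);
    cluster.addDesiredConfig("admin", Sets.newHashSet(c1), "note-1");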

http://git-wip-us.apache.org/repos/asf/ambari/blob/704170e4/ambari-server/src/test/java/org/apache/ambari/server/agent/HeartbeatTestHelper.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/agent/HeartbeatTestHelper.java b/ambari-server/src/test/java/org/apache/ambari/server/agent/HeartbeatTestHelper.java
index 43503fa..fc2bca5 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/agent/HeartbeatTestHelper.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/agent/HeartbeatTestHelper.java
@@ -193,11 +193,7 @@ public class HeartbeatTestHelper {
     cluster.setCurrentStackVersion(stackId);
 
     ConfigFactory cf = injector.getInstance(ConfigFactory.class);
-    Config config = cf.createNew(cluster, "cluster-env", configProperties, new HashMap<String, Map<String, String>>());
-    config.setTag("version1");
-    config.persist();
-
-    cluster.addConfig(config);
+    Config config = cf.createNew(cluster, "cluster-env", "version1", configProperties, new HashMap<String, Map<String, String>>());
     cluster.addDesiredConfig("user", Collections.singleton(config));
 
     helper.getOrCreateRepositoryVersion(stackId, stackId.getStackVersion());

http://git-wip-us.apache.org/repos/asf/ambari/blob/704170e4/ambari-server/src/test/java/org/apache/ambari/server/agent/TestHeartbeatMonitor.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/agent/TestHeartbeatMonitor.java b/ambari-server/src/test/java/org/apache/ambari/server/agent/TestHeartbeatMonitor.java
index 76ab45c..68e9993 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/agent/TestHeartbeatMonitor.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/agent/TestHeartbeatMonitor.java
@@ -159,10 +159,8 @@ public class TestHeartbeatMonitor {
     }};
 
     ConfigFactory configFactory = injector.getInstance(ConfigFactory.class);
-    Config config = configFactory.createNew(cluster, "hadoop-env",
+    Config config = configFactory.createNew(cluster, "hadoop-env", "version1",
         new HashMap<String,String>() {{ put("a", "b"); }}, new HashMap<String, Map<String,String>>());
-    config.setTag("version1");
-    cluster.addConfig(config);
     cluster.addDesiredConfig("_test", Collections.singleton(config));
 
 
@@ -243,18 +241,15 @@ public class TestHeartbeatMonitor {
     }};
 
     ConfigFactory configFactory = injector.getInstance(ConfigFactory.class);
-    Config hadoopEnvConfig = configFactory.createNew(cluster, "hadoop-env",
+    Config hadoopEnvConfig = configFactory.createNew(cluster, "hadoop-env", "version1",
       new HashMap<String, String>() {{
         put("a", "b");
       }}, new HashMap<String, Map<String,String>>());
-    Config hbaseEnvConfig = configFactory.createNew(cluster, "hbase-env",
+    Config hbaseEnvConfig = configFactory.createNew(cluster, "hbase-env", "version1",
             new HashMap<String, String>() {{
               put("a", "b");
             }}, new HashMap<String, Map<String,String>>());
-    hadoopEnvConfig.setTag("version1");
-    cluster.addConfig(hadoopEnvConfig);
-    hbaseEnvConfig.setTag("version1");
-    cluster.addConfig(hbaseEnvConfig);
+
     cluster.addDesiredConfig("_test", Collections.singleton(hadoopEnvConfig));
 
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/704170e4/ambari-server/src/test/java/org/apache/ambari/server/configuration/RecoveryConfigHelperTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/configuration/RecoveryConfigHelperTest.java b/ambari-server/src/test/java/org/apache/ambari/server/configuration/RecoveryConfigHelperTest.java
index 6533e1c..6640837 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/configuration/RecoveryConfigHelperTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/configuration/RecoveryConfigHelperTest.java
@@ -218,7 +218,7 @@ public class RecoveryConfigHelperTest {
     config.updateProperties(new HashMap<String, String>() {{
       put(RecoveryConfigHelper.RECOVERY_ENABLED_KEY, "false");
     }});
-    config.persist(false);
+    config.save();
 
     // Recovery config should be stale because of the above change.
     boolean isConfigStale = recoveryConfigHelper.isConfigStale(cluster.getClusterName(), DummyHostname1,

http://git-wip-us.apache.org/repos/asf/ambari/blob/704170e4/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerImplTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerImplTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerImplTest.java
index e54a117..2507a46 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerImplTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerImplTest.java
@@ -87,8 +87,8 @@ import org.apache.ambari.server.security.ldap.LdapBatchDto;
 import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.Clusters;
 import org.apache.ambari.server.state.ComponentInfo;
+import org.apache.ambari.server.state.Config;
 import org.apache.ambari.server.state.ConfigHelper;
-import org.apache.ambari.server.state.ConfigImpl;
 import org.apache.ambari.server.state.DesiredConfig;
 import org.apache.ambari.server.state.Host;
 import org.apache.ambari.server.state.MaintenanceState;
@@ -610,6 +610,7 @@ public class AmbariManagementControllerImplTest {
     Cluster cluster = createNiceMock(Cluster.class);
     ActionManager actionManager = createNiceMock(ActionManager.class);
     ClusterRequest clusterRequest = createNiceMock(ClusterRequest.class);
+    Config config = createNiceMock(Config.class);
 
     // requests
     Set<ClusterRequest> setRequests = Collections.singleton(clusterRequest);
@@ -632,18 +633,11 @@ public class AmbariManagementControllerImplTest {
     expect(clusters.getClusterById(1L)).andReturn(cluster).anyTimes();
     expect(cluster.getClusterName()).andReturn("clusterOld").anyTimes();
     expect(cluster.getConfigPropertiesTypes(anyObject(String.class))).andReturn(Maps.<PropertyInfo.PropertyType, Set<String>>newHashMap()).anyTimes();
-    expect(cluster.getDesiredConfigByType(anyObject(String.class))).andReturn(new ConfigImpl("config-type") {
-      @Override
-      public Map<String, Map<String, String>> getPropertiesAttributes() {
-        return Maps.newHashMap();
-      }
-
-      @Override
-      public Map<String, String> getProperties() {
-        return configReqProps;
-      }
 
-    }).anyTimes();
+    expect(config.getType()).andReturn("config-type").anyTimes();
+    expect(config.getProperties()).andReturn(configReqProps).anyTimes();
+    expect(config.getPropertiesAttributes()).andReturn(new HashMap<String,Map<String,String>>()).anyTimes();
+    expect(cluster.getDesiredConfigByType(anyObject(String.class))).andReturn(config).anyTimes();
 
     cluster.addSessionAttributes(anyObject(Map.class));
     expectLastCall().once();
@@ -652,7 +646,7 @@ public class AmbariManagementControllerImplTest {
     expectLastCall();
 
     // replay mocks
-    replay(actionManager, cluster, clusters, injector, clusterRequest, sessionManager);
+    replay(actionManager, cluster, clusters, config, injector, clusterRequest, sessionManager);
 
     // test
     AmbariManagementController controller = new AmbariManagementControllerImpl(actionManager, clusters, injector);
@@ -660,7 +654,7 @@ public class AmbariManagementControllerImplTest {
 
     // assert and verify
     assertSame(controller, controllerCapture.getValue());
-    verify(actionManager, cluster, clusters, injector, clusterRequest, sessionManager);
+    verify(actionManager, cluster, clusters, config, injector, clusterRequest, sessionManager);
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/ambari/blob/704170e4/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
index 509ec88..298a85f 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
@@ -122,7 +122,6 @@ import org.apache.ambari.server.state.Clusters;
 import org.apache.ambari.server.state.Config;
 import org.apache.ambari.server.state.ConfigFactory;
 import org.apache.ambari.server.state.ConfigHelper;
-import org.apache.ambari.server.state.ConfigImpl;
 import org.apache.ambari.server.state.Host;
 import org.apache.ambari.server.state.HostComponentAdminState;
 import org.apache.ambari.server.state.HostState;
@@ -408,7 +407,6 @@ public class AmbariManagementControllerTest {
     ConfigGroup configGroup = configGroupFactory.createNew(cluster, name,
       tag, "", configMap, hostMap);
 
-    configGroup.persist();
     cluster.addConfigGroup(configGroup);
 
     return configGroup.getId();
@@ -1940,10 +1938,8 @@ public class AmbariManagementControllerTest {
     Map<String, String> properties = new HashMap<String, String>();
     Map<String, Map<String, String>> propertiesAttributes = new HashMap<String, Map<String,String>>();
 
-    Config c1 = new ConfigImpl(cluster, "hdfs-site", properties, propertiesAttributes, injector);
-    c1.setTag("v1");
-    cluster.addConfig(c1);
-    c1.persist();
+    ConfigFactory configFactory = injector.getInstance(ConfigFactory.class);
+    Config c1 = configFactory.createNew(cluster, "hdfs-site", "v1",  properties, propertiesAttributes);
     configs.put(c1.getType(), c1);
 
     ServiceRequest r = new ServiceRequest(cluster1, serviceName, State.INSTALLED.toString());
@@ -1983,26 +1979,17 @@ public class AmbariManagementControllerTest {
     properties.put("a", "a1");
     properties.put("b", "b1");
 
-    Config c1 = new ConfigImpl(cluster, "hdfs-site", properties, propertiesAttributes, injector);
+    ConfigFactory configFactory = injector.getInstance(ConfigFactory.class);
+    Config c1 = configFactory.createNew(cluster, "hdfs-site", "v1", properties, propertiesAttributes);
     properties.put("c", cluster1);
     properties.put("d", "d1");
-    Config c2 = new ConfigImpl(cluster, "core-site", properties, propertiesAttributes, injector);
-    Config c3 = new ConfigImpl(cluster, "foo-site", properties, propertiesAttributes, injector);
+
+    Config c2 = configFactory.createNew(cluster, "core-site", "v1", properties, propertiesAttributes);
+    Config c3 = configFactory.createNew(cluster, "foo-site", "v1", properties, propertiesAttributes);
 
     Map<String, String> mapRequestProps = new HashMap<String, String>();
     mapRequestProps.put("context", "Called from a test");
 
-    c1.setTag("v1");
-    c2.setTag("v1");
-    c3.setTag("v1");
-
-    cluster.addConfig(c1);
-    cluster.addConfig(c2);
-    cluster.addConfig(c3);
-    c1.persist();
-    c2.persist();
-    c3.persist();
-
     configs.put(c1.getType(), c1);
     configs.put(c2.getType(), c2);
 
@@ -4210,27 +4197,20 @@ public class AmbariManagementControllerTest {
     cluster.setCurrentStackVersion(new StackId("HDP-2.0.6"));
 
     ConfigFactory cf = injector.getInstance(ConfigFactory.class);
-    Config config1 = cf.createNew(cluster, "global",
+    Config config1 = cf.createNew(cluster, "global", "version1",
         new HashMap<String, String>() {{
           put("key1", "value1");
         }}, new HashMap<String, Map<String, String>>());
-    config1.setTag("version1");
 
-    Config config2 = cf.createNew(cluster, "core-site",
+    Config config2 = cf.createNew(cluster, "core-site", "version1",
         new HashMap<String, String>() {{
           put("key1", "value1");
         }}, new HashMap<String, Map<String,String>>());
-    config2.setTag("version1");
 
-    Config config3 = cf.createNew(cluster, "yarn-site",
+    Config config3 = cf.createNew(cluster, "yarn-site", "version1",
         new HashMap<String, String>() {{
           put("test.password", "supersecret");
         }}, new HashMap<String, Map<String,String>>());
-    config3.setTag("version1");
-
-    cluster.addConfig(config1);
-    cluster.addConfig(config2);
-    cluster.addConfig(config3);
 
     Service hdfs = cluster.addService("HDFS");
     Service mapred = cluster.addService("YARN");
@@ -4383,20 +4363,15 @@ public class AmbariManagementControllerTest {
     cluster.setCurrentStackVersion(new StackId("HDP-2.0.7"));
 
     ConfigFactory cf = injector.getInstance(ConfigFactory.class);
-    Config config1 = cf.createNew(cluster, "global",
+    Config config1 = cf.createNew(cluster, "global", "version1",
       new HashMap<String, String>() {{
         put("key1", "value1");
       }}, new HashMap<String, Map<String,String>>());
-    config1.setTag("version1");
 
-    Config config2 = cf.createNew(cluster, "core-site",
+    Config config2 = cf.createNew(cluster, "core-site", "version1",
       new HashMap<String, String>() {{
         put("key1", "value1");
       }}, new HashMap<String, Map<String,String>>());
-    config2.setTag("version1");
-
-    cluster.addConfig(config1);
-    cluster.addConfig(config2);
 
     Service hdfs = cluster.addService("HDFS");
 
@@ -4488,19 +4463,15 @@ public class AmbariManagementControllerTest {
     cluster.setCurrentStackVersion(new StackId("HDP-2.0.7"));
 
     ConfigFactory cf = injector.getInstance(ConfigFactory.class);
-    Config config1 = cf.createNew(cluster, "global",
+    Config config1 = cf.createNew(cluster, "global", "version1",
         new HashMap<String, String>() {{
           put("key1", "value1");
         }}, new HashMap<String, Map<String,String>>());
-    config1.setTag("version1");
 
-    Config config2 = cf.createNew(cluster, "core-site",
+    Config config2 = cf.createNew(cluster, "core-site", "version1",
         new HashMap<String, String>() {{
           put("key1", "value1");
         }}, new HashMap<String, Map<String,String>>());
-    config2.setTag("version1");
-    config1.persist();
-    config2.persist();
 
     cluster.addConfig(config1);
     cluster.addConfig(config2);
@@ -4776,18 +4747,14 @@ public class AmbariManagementControllerTest {
     cluster.setCurrentStackVersion(new StackId("HDP-0.1"));
 
     ConfigFactory cf = injector.getInstance(ConfigFactory.class);
-    Config config1 = cf.createNew(cluster, "global",
+    Config config1 = cf.createNew(cluster, "global", "version1",
         new HashMap<String, String>(){{ put("key1", "value1"); }}, new HashMap<String, Map<String,String>>());
-    config1.setTag("version1");
     config1.setPropertiesAttributes(new HashMap<String, Map<String, String>>(){{ put("attr1", new HashMap<String, String>()); }});
 
-    Config config2 = cf.createNew(cluster, "core-site",
+    Config config2 = cf.createNew(cluster, "core-site", "version1",
         new HashMap<String, String>(){{ put("key1", "value1"); }}, new HashMap<String, Map<String,String>>());
-    config2.setTag("version1");
     config2.setPropertiesAttributes(new HashMap<String, Map<String, String>>(){{ put("attr2", new HashMap<String, String>()); }});
 
-    cluster.addConfig(config1);
-    cluster.addConfig(config2);
     cluster.addDesiredConfig("_test", Collections.singleton(config1));
     cluster.addDesiredConfig("_test", Collections.singleton(config2));
 
@@ -5522,11 +5489,8 @@ public class AmbariManagementControllerTest {
       configs3, null);
 
     ConfigFactory cf = injector.getInstance(ConfigFactory.class);
-    Config config1 = cf.createNew(cluster, "kerberos-env",
+    Config config1 = cf.createNew(cluster, "kerberos-env", "version1",
         new HashMap<String, String>(), new HashMap<String, Map<String,String>>());
-    config1.setTag("version1");
-
-    cluster.addConfig(config1);
 
     ClusterRequest crReq = new ClusterRequest(cluster.getClusterId(), cluster1, null, null);
     crReq.setDesiredConfig(Collections.singletonList(cr1));
@@ -6448,20 +6412,15 @@ public class AmbariManagementControllerTest {
     cluster.setCurrentStackVersion(new StackId("HDP-2.0.6"));
 
     ConfigFactory cf = injector.getInstance(ConfigFactory.class);
-    Config config1 = cf.createNew(cluster, "global",
+    Config config1 = cf.createNew(cluster, "global", "version1",
       new HashMap<String, String>() {{
         put("key1", "value1");
       }}, new HashMap<String, Map<String,String>>());
-    config1.setTag("version1");
 
-    Config config2 = cf.createNew(cluster, "core-site",
+    Config config2 = cf.createNew(cluster, "core-site", "version1",
       new HashMap<String, String>() {{
         put("key1", "value1");
       }}, new HashMap<String, Map<String,String>>());
-    config2.setTag("version1");
-
-    cluster.addConfig(config1);
-    cluster.addConfig(config2);
 
     Service hdfs = cluster.addService("HDFS");
     Service mapred = cluster.addService("YARN");
@@ -6554,20 +6513,15 @@ public class AmbariManagementControllerTest {
     cluster.setCurrentStackVersion(new StackId("HDP-2.0.6"));
 
     ConfigFactory cf = injector.getInstance(ConfigFactory.class);
-    Config config1 = cf.createNew(cluster, "global",
+    Config config1 = cf.createNew(cluster, "global", "version1",
       new HashMap<String, String>() {{
         put("key1", "value1");
       }}, new HashMap<String, Map<String,String>>());
-    config1.setTag("version1");
 
-    Config config2 = cf.createNew(cluster, "core-site",
+    Config config2 = cf.createNew(cluster, "core-site", "version1",
       new HashMap<String, String>() {{
         put("key1", "value1");
       }}, new HashMap<String, Map<String,String>>());
-    config2.setTag("version1");
-
-    cluster.addConfig(config1);
-    cluster.addConfig(config2);
 
     Service hdfs = cluster.addService("HDFS");
     Service mapred = cluster.addService("YARN");
@@ -6981,13 +6935,13 @@ public class AmbariManagementControllerTest {
     String group2 = getUniqueName();
     String tag2 = getUniqueName();
 
+    ConfigFactory configFactory = injector.getInstance(ConfigFactory.class);
+
     // Create Config group for core-site
     configs = new HashMap<String, String>();
     configs.put("a", "c");
     cluster = clusters.getCluster(cluster1);
-    final Config config = new ConfigImpl("core-site");
-    config.setProperties(configs);
-    config.setTag("version122");
+    final Config config =  configFactory.createReadOnly("core-site", "version122", configs, null);
     Long groupId = createConfigGroup(cluster, group1, tag1,
       new ArrayList<String>() {{ add(host1); }},
       new ArrayList<Config>() {{ add(config); }});
@@ -6998,9 +6952,7 @@ public class AmbariManagementControllerTest {
     configs = new HashMap<String, String>();
     configs.put("a", "c");
 
-    final Config config2 = new ConfigImpl("mapred-site");
-    config2.setProperties(configs);
-    config2.setTag("version122");
+    final Config config2 =  configFactory.createReadOnly("mapred-site", "version122", configs, null);
     groupId = createConfigGroup(cluster, group2, tag2,
       new ArrayList<String>() {{ add(host1); }},
       new ArrayList<Config>() {{ add(config2); }});
@@ -7065,7 +7017,6 @@ public class AmbariManagementControllerTest {
     ConfigGroup configGroup = cluster.getConfigGroups().get(groupId);
     configGroup.setHosts(new HashMap<Long, Host>() {{ put(3L,
       clusters.getHost(host3)); }});
-    configGroup.persist();
 
     requestId = startService(cluster1, serviceName2, false, false);
     mapredInstall = null;
@@ -7143,9 +7094,8 @@ public class AmbariManagementControllerTest {
     String group1 = getUniqueName();
     String tag1 = getUniqueName();
 
-    final Config config = new ConfigImpl("hdfs-site");
-    config.setProperties(configs);
-    config.setTag("version122");
+    ConfigFactory configFactory = injector.getInstance(ConfigFactory.class);
+    final Config config = configFactory.createReadOnly("hdfs-site", "version122", configs, null);
     Long groupId = createConfigGroup(clusters.getCluster(cluster1), group1, tag1,
         new ArrayList<String>() {{
           add(host1);
@@ -7253,9 +7203,8 @@ public class AmbariManagementControllerTest {
     configs = new HashMap<String, String>();
     configs.put("a", "c");
 
-    final Config config = new ConfigImpl("hdfs-site");
-    config.setProperties(configs);
-    config.setTag("version122");
+    ConfigFactory configFactory = injector.getInstance(ConfigFactory.class);
+    final Config config = configFactory.createReadOnly("hdfs-site", "version122", configs, null);
     Long groupId = createConfigGroup(clusters.getCluster(cluster1), group1, tag1,
       new ArrayList<String>() {{ add(host1); add(host2); }},
       new ArrayList<Config>() {{ add(config); }});

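The hunks above all follow one pattern: a Config is no longer built directly with new ConfigImpl(...), tagged with setTag(), registered via cluster.addConfig() and persisted, but is instead obtained from the injected ConfigFactory, which takes the tag up front. Below is a minimal sketch (not part of the patch) of the two factory calls used in these tests; it assumes an Injector and a Cluster set up as in the surrounding test code, and the helper method name is illustrative only.

    // Illustrative sketch of the pattern the hunks above converge on.
    // Assumes the usual test imports (Config, ConfigFactory, Cluster, Injector,
    // java.util.*) and an injector/cluster prepared as in these tests.
    void createConfigsViaFactory(Injector injector, Cluster cluster) {
      ConfigFactory configFactory = injector.getInstance(ConfigFactory.class);

      Map<String, String> properties = new HashMap<String, String>();
      properties.put("a", "a1");

      // Old style (removed above):
      //   Config c1 = new ConfigImpl(cluster, "hdfs-site", properties, attributes, injector);
      //   c1.setTag("v1");
      //   cluster.addConfig(c1);
      //   c1.persist();
      // New style: the tag is passed as an argument and no persist() call is
      // needed; in most hunks above the explicit cluster.addConfig() call is
      // dropped as well.
      Config c1 = configFactory.createNew(cluster, "hdfs-site", "v1",
          properties, new HashMap<String, Map<String, String>>());

      // Detached, in-memory config carrying only type/tag/values, used where a
      // persisted cluster config is not required (e.g. the config-group tests).
      Config readOnly = configFactory.createReadOnly("core-site", "version122",
          properties, null);
    }
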
http://git-wip-us.apache.org/repos/asf/ambari/blob/704170e4/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UpgradeResourceProviderHDP22Test.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UpgradeResourceProviderHDP22Test.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UpgradeResourceProviderHDP22Test.java
index 96810cf..1747b28 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UpgradeResourceProviderHDP22Test.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UpgradeResourceProviderHDP22Test.java
@@ -66,7 +66,7 @@ import org.apache.ambari.server.security.authorization.RoleAuthorization;
 import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.Clusters;
 import org.apache.ambari.server.state.Config;
-import org.apache.ambari.server.state.ConfigImpl;
+import org.apache.ambari.server.state.ConfigFactory;
 import org.apache.ambari.server.state.Host;
 import org.apache.ambari.server.state.HostState;
 import org.apache.ambari.server.state.RepositoryVersionState;
@@ -108,6 +108,7 @@ public class UpgradeResourceProviderHDP22Test {
   private AmbariManagementController amc;
   private StackDAO stackDAO;
   private TopologyManager topologyManager;
+  private ConfigFactory configFactory;
 
   private static final String configTagVersion1 = "version1";
   private static final String configTagVersion2 = "version2";
@@ -136,6 +137,7 @@ public class UpgradeResourceProviderHDP22Test {
     stackDAO = injector.getInstance(StackDAO.class);
     upgradeDao = injector.getInstance(UpgradeDAO.class);
     repoVersionDao = injector.getInstance(RepositoryVersionDAO.class);
+    configFactory = injector.getInstance(ConfigFactory.class);
 
     AmbariEventPublisher publisher = createNiceMock(AmbariEventPublisher.class);
     replay(publisher);
@@ -233,11 +235,7 @@ public class UpgradeResourceProviderHDP22Test {
       }
     }
 
-    Config config = new ConfigImpl("hive-site");
-    config.setProperties(configTagVersion1Properties);
-    config.setTag(configTagVersion1);
-
-    cluster.addConfig(config);
+    Config config = configFactory.createNew(cluster, "hive-site", configTagVersion1, configTagVersion1Properties, null);
     cluster.addDesiredConfig("admin", Collections.singleton(config));
 
     Map<String, Object> requestProps = new HashMap<String, Object>();
@@ -286,9 +284,7 @@ public class UpgradeResourceProviderHDP22Test {
     // Hive service checks have generated the ExecutionCommands by now.
     // Change the new desired config tag and verify execution command picks up new tag
     assertEquals(configTagVersion1, cluster.getDesiredConfigByType("hive-site").getTag());
-    final Config newConfig = new ConfigImpl("hive-site");
-    newConfig.setProperties(configTagVersion2Properties);
-    newConfig.setTag(configTagVersion2);
+    final Config newConfig = configFactory.createNew(cluster, "hive-site", configTagVersion2, configTagVersion2Properties, null);
     Set<Config> desiredConfigs = new HashSet<Config>() {
       {
         add(newConfig);

http://git-wip-us.apache.org/repos/asf/ambari/blob/704170e4/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UpgradeResourceProviderTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UpgradeResourceProviderTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UpgradeResourceProviderTest.java
index d69bdbe..844331e 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UpgradeResourceProviderTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UpgradeResourceProviderTest.java
@@ -85,8 +85,8 @@ import org.apache.ambari.server.serveraction.upgrades.AutoSkipFailedSummaryActio
 import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.Clusters;
 import org.apache.ambari.server.state.Config;
+import org.apache.ambari.server.state.ConfigFactory;
 import org.apache.ambari.server.state.ConfigHelper;
-import org.apache.ambari.server.state.ConfigImpl;
 import org.apache.ambari.server.state.DesiredConfig;
 import org.apache.ambari.server.state.Host;
 import org.apache.ambari.server.state.HostState;
@@ -144,6 +144,7 @@ public class UpgradeResourceProviderTest {
   private StackDAO stackDAO;
   private AmbariMetaInfo ambariMetaInfo;
   private TopologyManager topologyManager;
+  private ConfigFactory configFactory;
 
   @Before
   public void before() throws Exception {
@@ -174,6 +175,7 @@ public class UpgradeResourceProviderTest {
 
     amc = injector.getInstance(AmbariManagementController.class);
     ambariMetaInfo = injector.getInstance(AmbariMetaInfo.class);
+    configFactory = injector.getInstance(ConfigFactory.class);
 
     Field field = AmbariServer.class.getDeclaredField("clusterController");
     field.setAccessible(true);
@@ -1046,16 +1048,9 @@ public class UpgradeResourceProviderTest {
     }
 
 
-    Config config = new ConfigImpl("zoo.cfg");
-    config.setProperties(new HashMap<String, String>() {{
-      put("a", "b");
-    }});
-    config.setTag("abcdefg");
-
-    cluster.addConfig(config);
+    Config config = configFactory.createNew(cluster, "zoo.cfg", "abcdefg", Collections.singletonMap("a", "b"), null);
     cluster.addDesiredConfig("admin", Collections.singleton(config));
 
-
     Map<String, Object> requestProps = new HashMap<String, Object>();
     requestProps.put(UpgradeResourceProvider.UPGRADE_CLUSTER_NAME, "c1");
     requestProps.put(UpgradeResourceProvider.UPGRADE_VERSION, "2.2.0.0");

http://git-wip-us.apache.org/repos/asf/ambari/blob/704170e4/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/ComponentVersionCheckActionTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/ComponentVersionCheckActionTest.java b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/ComponentVersionCheckActionTest.java
index 0163024..80a3bc5 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/ComponentVersionCheckActionTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/ComponentVersionCheckActionTest.java
@@ -49,8 +49,7 @@ import org.apache.ambari.server.orm.entities.HostVersionEntity;
 import org.apache.ambari.server.orm.entities.StackEntity;
 import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.Clusters;
-import org.apache.ambari.server.state.Config;
-import org.apache.ambari.server.state.ConfigImpl;
+import org.apache.ambari.server.state.ConfigFactory;
 import org.apache.ambari.server.state.Host;
 import org.apache.ambari.server.state.RepositoryInfo;
 import org.apache.ambari.server.state.RepositoryVersionState;
@@ -113,6 +112,9 @@ public class ComponentVersionCheckActionTest {
   @Inject
   private ServiceComponentHostFactory serviceComponentHostFactory;
 
+  @Inject
+  private ConfigFactory configFactory;
+
   @Before
   public void setup() throws Exception {
     m_injector = Guice.createInjector(new InMemoryDefaultTestModule());
@@ -399,18 +401,11 @@ public class ComponentVersionCheckActionTest {
     properties.put("a", "a1");
     properties.put("b", "b1");
 
-    Config c1 = new ConfigImpl(cluster, "hdfs-site", properties, propertiesAttributes, m_injector);
+    configFactory.createNew(cluster, "hdfs-site", "version1", properties, propertiesAttributes);
     properties.put("c", "c1");
     properties.put("d", "d1");
 
-    Config c2 = new ConfigImpl(cluster, "core-site", properties, propertiesAttributes, m_injector);
-    Config c3 = new ConfigImpl(cluster, "foo-site", properties, propertiesAttributes, m_injector);
-
-    cluster.addConfig(c1);
-    cluster.addConfig(c2);
-    cluster.addConfig(c3);
-    c1.persist();
-    c2.persist();
-    c3.persist();
+    configFactory.createNew(cluster, "core-site", "version1", properties, propertiesAttributes);
+    configFactory.createNew(cluster, "foo-site", "version1", properties, propertiesAttributes);
   }
 }

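ComponentVersionCheckActionTest applies the same migration but obtains the factory by field injection rather than from the injector. A short sketch of that variant follows; the class and method names are invented for illustration, and the setup (Guice module, member injection) is assumed to match the test above.

    // Illustrative sketch: field-injected ConfigFactory, as used above.
    public class ExampleVersionCheckTest {  // hypothetical class name

      @Inject
      private ConfigFactory configFactory;

      void createConfigs(Cluster cluster) {
        Map<String, String> properties = new HashMap<String, String>();
        properties.put("a", "a1");
        Map<String, Map<String, String>> propertiesAttributes =
            new HashMap<String, Map<String, String>>();

        // The returned Config is discarded when only the side effect of
        // creating the config matters, as in the hunk above.
        configFactory.createNew(cluster, "hdfs-site", "version1",
            properties, propertiesAttributes);
      }
    }
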
http://git-wip-us.apache.org/repos/asf/ambari/blob/704170e4/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/ConfigureActionTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/ConfigureActionTest.java b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/ConfigureActionTest.java
index 7ab2856..92fa084 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/ConfigureActionTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/ConfigureActionTest.java
@@ -132,13 +132,10 @@ public class ConfigureActionTest {
 
     c.setCurrentStackVersion(HDP_211_STACK);
     c.setDesiredStackVersion(HDP_220_STACK);
-    Config config = cf.createNew(c, "zoo.cfg", new HashMap<String, String>() {{
+    Config config = cf.createNew(c, "zoo.cfg", "version2", new HashMap<String, String>() {{
           put("initLimit", "10");
         }}, new HashMap<String, Map<String,String>>());
-    config.setTag("version2");
-    config.persist();
 
-    c.addConfig(config);
     c.addDesiredConfig("user", Collections.singleton(config));
     assertEquals(2, c.getConfigsByType("zoo.cfg").size());
 
@@ -196,16 +193,13 @@ public class ConfigureActionTest {
 
     // create a config for zoo.cfg with two values; one is a stack value and the
     // other is custom
-    Config config = cf.createNew(c, "zoo.cfg", new HashMap<String, String>() {
+    Config config = cf.createNew(c, "zoo.cfg", "version2", new HashMap<String, String>() {
       {
         put("tickTime", "2000");
         put("foo", "bar");
       }
     }, new HashMap<String, Map<String, String>>());
-    config.setTag("version2");
-    config.persist();
 
-    c.addConfig(config);
     c.addDesiredConfig("user", Collections.singleton(config));
     assertEquals(2, c.getConfigsByType("zoo.cfg").size());
 
@@ -262,16 +256,13 @@ public class ConfigureActionTest {
     assertEquals(1, c.getConfigsByType("zoo.cfg").size());
 
     c.setDesiredStackVersion(HDP_220_STACK);
-    Config config = cf.createNew(c, "zoo.cfg", new HashMap<String, String>() {{
+    Config config = cf.createNew(c, "zoo.cfg", "version2", new HashMap<String, String>() {{
           put("initLimit", "10");
           put("copyIt", "10");
           put("moveIt", "10");
           put("deleteIt", "10");
         }}, new HashMap<String, Map<String,String>>());
-    config.setTag("version2");
-    config.persist();
 
-    c.addConfig(config);
     c.addDesiredConfig("user", Collections.singleton(config));
     assertEquals(2, c.getConfigsByType("zoo.cfg").size());
 
@@ -402,15 +393,12 @@ public class ConfigureActionTest {
     assertEquals(1, c.getConfigsByType("zoo.cfg").size());
 
     c.setDesiredStackVersion(HDP_220_STACK);
-    Config config = cf.createNew(c, "zoo.cfg", new HashMap<String, String>() {
+    Config config = cf.createNew(c, "zoo.cfg", "version2", new HashMap<String, String>() {
       {
         put("zoo.server.csv", "c6401,c6402,  c6403");
       }
     }, new HashMap<String, Map<String, String>>());
-    config.setTag("version2");
-    config.persist();
 
-    c.addConfig(config);
     c.addDesiredConfig("user", Collections.singleton(config));
     assertEquals(2, c.getConfigsByType("zoo.cfg").size());
 
@@ -468,16 +456,13 @@ public class ConfigureActionTest {
     assertEquals(1, c.getConfigsByType("zoo.cfg").size());
 
     c.setDesiredStackVersion(HDP_220_STACK);
-    Config config = cf.createNew(c, "zoo.cfg", new HashMap<String, String>() {
+    Config config = cf.createNew(c, "zoo.cfg", "version2", new HashMap<String, String>() {
       {
         put("key_to_replace", "My New Cat");
         put("key_with_no_match", "WxyAndZ");
       }
     }, new HashMap<String, Map<String, String>>());
-    config.setTag("version2");
-    config.persist();
 
-    c.addConfig(config);
     c.addDesiredConfig("user", Collections.singleton(config));
     assertEquals(2, c.getConfigsByType("zoo.cfg").size());
 
@@ -543,16 +528,13 @@ public class ConfigureActionTest {
     assertEquals(1, c.getConfigsByType("zoo.cfg").size());
 
     c.setDesiredStackVersion(HDP_220_STACK);
-    Config config = cf.createNew(c, "zoo.cfg", new HashMap<String, String>() {
+    Config config = cf.createNew(c, "zoo.cfg", "version2", new HashMap<String, String>() {
       {
         put("existing", "This exists!");
         put("missing", null);
       }
     }, new HashMap<String, Map<String, String>>());
-    config.setTag("version2");
-    config.persist();
 
-    c.addConfig(config);
     c.addDesiredConfig("user", Collections.singleton(config));
     assertEquals(2, c.getConfigsByType("zoo.cfg").size());
 
@@ -604,16 +586,12 @@ public class ConfigureActionTest {
 
     c.setCurrentStackVersion(HDP_211_STACK);
     c.setDesiredStackVersion(HDP_220_STACK);
-    Config config = cf.createNew(c, "zoo.cfg", new HashMap<String, String>() {
+    Config config = cf.createNew(c, "zoo.cfg", "version2", new HashMap<String, String>() {
       {
         put("fooKey", "barValue");
       }
     }, new HashMap<String, Map<String, String>>());
 
-    config.setTag("version2");
-    config.persist();
-
-    c.addConfig(config);
     c.addDesiredConfig("user", Collections.singleton(config));
     assertEquals(2, c.getConfigsByType("zoo.cfg").size());
 
@@ -671,7 +649,7 @@ public class ConfigureActionTest {
 
     c.setCurrentStackVersion(HDP_211_STACK);
     c.setDesiredStackVersion(HDP_220_STACK);
-    Config config = cf.createNew(c, "zoo.cfg", new HashMap<String, String>() {
+    Config config = cf.createNew(c, "zoo.cfg", "version2", new HashMap<String, String>() {
       {
         put("set.key.1", "s1");
         put("set.key.2", "s2");
@@ -680,10 +658,6 @@ public class ConfigureActionTest {
       }
     }, new HashMap<String, Map<String, String>>());
 
-    config.setTag("version2");
-    config.persist();
-
-    c.addConfig(config);
     c.addDesiredConfig("user", Collections.singleton(config));
     assertEquals(2, c.getConfigsByType("zoo.cfg").size());
 
@@ -769,7 +743,7 @@ public class ConfigureActionTest {
 
     c.setCurrentStackVersion(HDP_211_STACK);
     c.setDesiredStackVersion(HDP_220_STACK);
-    Config config = cf.createNew(c, "zoo.cfg", new HashMap<String, String>() {
+    Config config = cf.createNew(c, "zoo.cfg", "version2", new HashMap<String, String>() {
       {
         put("set.key.1", "s1");
         put("set.key.2", "s2");
@@ -778,10 +752,6 @@ public class ConfigureActionTest {
       }
     }, new HashMap<String, Map<String, String>>());
 
-    config.setTag("version2");
-    config.persist();
-
-    c.addConfig(config);
     c.addDesiredConfig("user", Collections.singleton(config));
     assertEquals(2, c.getConfigsByType("zoo.cfg").size());
 
@@ -855,7 +825,7 @@ public class ConfigureActionTest {
 
     c.setCurrentStackVersion(HDP_211_STACK);
     c.setDesiredStackVersion(HDP_220_STACK);
-    Config config = cf.createNew(c, "zoo.cfg", new HashMap<String, String>() {
+    Config config = cf.createNew(c, "zoo.cfg", "version2", new HashMap<String, String>() {
       {
         put("replace.key.1", "r1");
         put("replace.key.2", "r2");
@@ -865,10 +835,6 @@ public class ConfigureActionTest {
       }
     }, new HashMap<String, Map<String, String>>());
 
-    config.setTag("version2");
-    config.persist();
-
-    c.addConfig(config);
     c.addDesiredConfig("user", Collections.singleton(config));
     assertEquals(2, c.getConfigsByType("zoo.cfg").size());
 
@@ -951,7 +917,7 @@ public class ConfigureActionTest {
 
     c.setCurrentStackVersion(HDP_211_STACK);
     c.setDesiredStackVersion(HDP_220_STACK);
-    Config config = cf.createNew(c, "zoo.cfg", new HashMap<String, String>() {
+    Config config = cf.createNew(c, "zoo.cfg", "version2", new HashMap<String, String>() {
       {
         put("replace.key.1", "r1");
         put("replace.key.2", "r2");
@@ -961,10 +927,6 @@ public class ConfigureActionTest {
       }
     }, new HashMap<String, Map<String, String>>());
 
-    config.setTag("version2");
-    config.persist();
-
-    c.addConfig(config);
     c.addDesiredConfig("user", Collections.singleton(config));
     assertEquals(2, c.getConfigsByType("zoo.cfg").size());
 
@@ -1041,15 +1003,12 @@ public class ConfigureActionTest {
     assertEquals(1, c.getConfigsByType("zoo.cfg").size());
 
     c.setDesiredStackVersion(HDP_220_STACK);
-    Config config = cf.createNew(c, "zoo.cfg", new HashMap<String, String>() {{
+    Config config = cf.createNew(c, "zoo.cfg", "version2", new HashMap<String, String>() {{
           put("initLimit", "10");
           put("copy.key.1", "c1");
           put("copy.key.2", "c2");
         }}, new HashMap<String, Map<String,String>>());
-    config.setTag("version2");
-    config.persist();
 
-    c.addConfig(config);
     c.addDesiredConfig("user", Collections.singleton(config));
     assertEquals(2, c.getConfigsByType("zoo.cfg").size());
 
@@ -1157,15 +1116,12 @@ public class ConfigureActionTest {
     assertEquals(1, c.getConfigsByType("zoo.cfg").size());
 
     c.setDesiredStackVersion(HDP_220_STACK);
-    Config config = cf.createNew(c, "zoo.cfg", new HashMap<String, String>() {{
+    Config config = cf.createNew(c, "zoo.cfg", "version2", new HashMap<String, String>() {{
           put("initLimit", "10");
           put("copy.key.1", "c1");
           put("copy.key.2", "c2");
         }}, new HashMap<String, Map<String,String>>());
-    config.setTag("version2");
-    config.persist();
 
-    c.addConfig(config);
     c.addDesiredConfig("user", Collections.singleton(config));
     assertEquals(2, c.getConfigsByType("zoo.cfg").size());
 
@@ -1253,17 +1209,14 @@ public class ConfigureActionTest {
     assertEquals(1, c.getConfigsByType("zoo.cfg").size());
 
     c.setDesiredStackVersion(HDP_220_STACK);
-    Config config = cf.createNew(c, "zoo.cfg", new HashMap<String, String>() {{
+    Config config = cf.createNew(c, "zoo.cfg", "version2", new HashMap<String, String>() {{
           put("initLimit", "10");
           put("move.key.1", "m1");
           put("move.key.2", "m2");
           put("move.key.3", "m3");
           put("move.key.4", "m4");
         }}, new HashMap<String, Map<String,String>>());
-    config.setTag("version2");
-    config.persist();
 
-    c.addConfig(config);
     c.addDesiredConfig("user", Collections.singleton(config));
     assertEquals(2, c.getConfigsByType("zoo.cfg").size());
 
@@ -1362,17 +1315,15 @@ public class ConfigureActionTest {
     assertEquals(1, c.getConfigsByType("zoo.cfg").size());
 
     c.setDesiredStackVersion(HDP_220_STACK);
-    Config config = cf.createNew(c, "zoo.cfg", new HashMap<String, String>() {{
+    Config config = cf.createNew(c, "zoo.cfg", "version2",
+        new HashMap<String, String>() {{
           put("initLimit", "10");
           put("move.key.1", "m1");
           put("move.key.2", "m2");
           put("move.key.3", "m3");
           put("move.key.4", "m4");
         }}, new HashMap<String, Map<String,String>>());
-    config.setTag("version2");
-    config.persist();
 
-    c.addConfig(config);
     c.addDesiredConfig("user", Collections.singleton(config));
     assertEquals(2, c.getConfigsByType("zoo.cfg").size());
 
@@ -1466,17 +1417,14 @@ public class ConfigureActionTest {
     assertEquals(1, c.getConfigsByType("zoo.cfg").size());
 
     c.setDesiredStackVersion(HDP_220_STACK);
-    Config config = cf.createNew(c, "zoo.cfg", new HashMap<String, String>() {{
+    Config config = cf.createNew(c, "zoo.cfg", "version2", new HashMap<String, String>() {{
           put("initLimit", "10");
           put("delete.key.1", "d1");
           put("delete.key.2", "d2");
           put("delete.key.3", "d3");
           put("delete.key.4", "d4");
         }}, new HashMap<String, Map<String,String>>());
-    config.setTag("version2");
-    config.persist();
 
-    c.addConfig(config);
     c.addDesiredConfig("user", Collections.singleton(config));
     assertEquals(2, c.getConfigsByType("zoo.cfg").size());
 
@@ -1567,17 +1515,14 @@ public class ConfigureActionTest {
     assertEquals(1, c.getConfigsByType("zoo.cfg").size());
 
     c.setDesiredStackVersion(HDP_220_STACK);
-    Config config = cf.createNew(c, "zoo.cfg", new HashMap<String, String>() {{
+    Config config = cf.createNew(c, "zoo.cfg", "version2", new HashMap<String, String>() {{
           put("initLimit", "10");
           put("delete.key.1", "d1");
           put("delete.key.2", "d2");
           put("delete.key.3", "d3");
           put("delete.key.4", "d4");
         }}, new HashMap<String, Map<String,String>>());
-    config.setTag("version2");
-    config.persist();
 
-    c.addConfig(config);
     c.addDesiredConfig("user", Collections.singleton(config));
     assertEquals(2, c.getConfigsByType("zoo.cfg").size());
 
@@ -1674,15 +1619,12 @@ public class ConfigureActionTest {
     // service properties will not run!
     installService(c, "ZOOKEEPER");
 
-    Config config = cf.createNew(c, "zoo.cfg", new HashMap<String, String>() {
+    Config config = cf.createNew(c, "zoo.cfg", "version1", new HashMap<String, String>() {
       {
         put("initLimit", "10");
       }
     }, new HashMap<String, Map<String, String>>());
-    config.setTag("version1");
-    config.persist();
 
-    c.addConfig(config);
     c.addDesiredConfig("user", Collections.singleton(config));
 
     // add a host component