Posted to commits@ambari.apache.org by nc...@apache.org on 2016/09/21 13:29:32 UTC

[10/21] ambari git commit: AMBARI-18420. Analyze and Optimize Ambari Server Unit Tests - Group 6.(vbrodetskyi)

AMBARI-18420. Analyze and Optimize Ambari Server Unit Tests - Group 6.(vbrodetskyi)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/8425f1fa
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/8425f1fa
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/8425f1fa

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 8425f1fa714718e32a05e8fcd005047c6de46451
Parents: a9bbf46
Author: Vitaly Brodetskyi <vb...@hortonworks.com>
Authored: Tue Sep 20 17:44:21 2016 +0300
Committer: Vitaly Brodetskyi <vb...@hortonworks.com>
Committed: Tue Sep 20 17:44:21 2016 +0300

----------------------------------------------------------------------
 .../server/orm/dao/HostConfigMappingDAO.java    |   4 +-
 .../server/agent/TestHeartbeatMonitor.java      |  14 +-
 ...ThreadPoolExecutorCompletionServiceTest.java |   6 +-
 .../apache/ambari/server/orm/TestOrmImpl.java   | 116 ++++++---
 .../server/orm/dao/AlertDispatchDAOTest.java    | 235 ++++++++++++-------
 .../security/authorization/TestUsers.java       |   7 +
 .../upgrades/ConfigureActionTest.java           | 212 ++++++++++-------
 .../upgrades/UpgradeActionTest.java             | 153 ++++++------
 .../apache/ambari/server/state/ServiceTest.java | 124 ++++++----
 .../svccomphost/ServiceComponentHostTest.java   |  34 +--
 .../server/upgrade/UpgradeCatalog210Test.java   |   7 +
 11 files changed, 557 insertions(+), 355 deletions(-)
----------------------------------------------------------------------
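The recurring change across these test classes, visible in the hunks below, is moving expensive setup out of per-test @Before methods into a single static @BeforeClass, wrapping each test in a Guice UnitOfWork, deleting created entities in @After instead of rebuilding the database, and stopping the PersistService once in @AfterClass. A minimal sketch of that lifecycle, assuming a plain guice-persist JPA module and an illustrative persistence unit name (not the Ambari test module or its DAOs):

import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;

import com.google.inject.Guice;
import com.google.inject.Injector;
import com.google.inject.persist.PersistService;
import com.google.inject.persist.UnitOfWork;
import com.google.inject.persist.jpa.JpaPersistModule;

public class ExampleDaoTest {
  private static Injector injector;

  @BeforeClass
  public static void classSetUp() {
    // Expensive work happens once per class: build the injector and start JPA.
    // "test-unit" is a hypothetical persistence unit defined in persistence.xml.
    injector = Guice.createInjector(new JpaPersistModule("test-unit"));
    injector.getInstance(PersistService.class).start();
  }

  @Before
  public void setup() {
    // Each test runs inside its own unit of work.
    injector.getInstance(UnitOfWork.class).begin();
  }

  @After
  public void teardown() {
    injector.getInstance(UnitOfWork.class).end();
    // Per-test cleanup of entities created by the test goes here,
    // instead of tearing down and recreating the injector.
  }

  @AfterClass
  public static void afterClass() {
    // Stop the persistence service once, after all tests in the class.
    injector.getInstance(PersistService.class).stop();
  }

  @Test
  public void testSomething() {
    // test body omitted
  }
}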


http://git-wip-us.apache.org/repos/asf/ambari/blob/8425f1fa/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/HostConfigMappingDAO.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/HostConfigMappingDAO.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/HostConfigMappingDAO.java
index 77ff4a2..f8392d9 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/HostConfigMappingDAO.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/HostConfigMappingDAO.java
@@ -301,9 +301,7 @@ public class HostConfigMappingDAO {
 
         List<HostConfigMappingEntity> hostConfigMappingEntities = daoUtils.selectList(query);
 
-        List<HostConfigMappingEntity> list = daoUtils.selectList(query, hostEntity.getHostId());
-
-        for (HostConfigMappingEntity entity : list) {
+        for (HostConfigMappingEntity entity : hostConfigMappingEntities) {
           entityManagerProvider.get().remove(entity);
         }
         // Update the cache

http://git-wip-us.apache.org/repos/asf/ambari/blob/8425f1fa/ambari-server/src/test/java/org/apache/ambari/server/agent/TestHeartbeatMonitor.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/agent/TestHeartbeatMonitor.java b/ambari-server/src/test/java/org/apache/ambari/server/agent/TestHeartbeatMonitor.java
index b6f5ec2..63460a2 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/agent/TestHeartbeatMonitor.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/agent/TestHeartbeatMonitor.java
@@ -57,6 +57,7 @@ import org.apache.ambari.server.state.svccomphost.ServiceComponentHostInstallEve
 import org.apache.ambari.server.state.svccomphost.ServiceComponentHostOpSucceededEvent;
 import org.apache.ambari.server.state.svccomphost.ServiceComponentHostStartedEvent;
 import org.junit.After;
+import org.junit.AfterClass;
 import org.junit.Before;
 import org.junit.BeforeClass;
 import org.junit.Test;
@@ -66,6 +67,8 @@ import org.slf4j.LoggerFactory;
 
 import com.google.inject.Guice;
 import com.google.inject.Injector;
+import com.google.inject.persist.PersistService;
+import com.google.inject.persist.UnitOfWork;
 
 public class TestHeartbeatMonitor {
 
@@ -73,6 +76,7 @@ public class TestHeartbeatMonitor {
 
   private String hostname1 = "host1";
   private String hostname2 = "host2";
+  private String hostname3 = "host3";
   private String clusterName = "cluster1";
   private String serviceName = "HDFS";
   private int heartbeatMonitorWakeupIntervalMS = 30;
@@ -93,11 +97,17 @@ public class TestHeartbeatMonitor {
   @Before
   public void setup() throws Exception {
     cleanup();
+    injector.getInstance(UnitOfWork.class).begin();
   }
 
   @After
   public void teardown() {
+    injector.getInstance(UnitOfWork.class).end();
+  }
 
+  @AfterClass
+  public static void afterClass() throws Exception {
+    injector.getInstance(PersistService.class).stop();
   }
 
   private void cleanup() throws AmbariException {
@@ -154,7 +164,7 @@ public class TestHeartbeatMonitor {
       Thread.sleep(1);
     }
     assertEquals(fsm.getHost(hostname).getState(), HostState.HEARTBEAT_LOST);
-    classSetUp();
+    hm.shutdown();
   }
 
   @Test
@@ -468,7 +478,7 @@ public class TestHeartbeatMonitor {
 
     helper.getOrCreateRepositoryVersion(stackId, stackId.getStackVersion());
     cluster.createClusterVersion(stackId, stackId.getStackVersion(), "admin",
-        RepositoryVersionState.INSTALLING);
+            RepositoryVersionState.INSTALLING);
 
     Set<String> hostNames = new HashSet<String>(){{
       add(hostname1);

http://git-wip-us.apache.org/repos/asf/ambari/blob/8425f1fa/ambari-server/src/test/java/org/apache/ambari/server/controller/test/BufferedThreadPoolExecutorCompletionServiceTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/test/BufferedThreadPoolExecutorCompletionServiceTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/test/BufferedThreadPoolExecutorCompletionServiceTest.java
index ede1f1f..ec99ab0 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/test/BufferedThreadPoolExecutorCompletionServiceTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/test/BufferedThreadPoolExecutorCompletionServiceTest.java
@@ -17,6 +17,8 @@
  */
 package org.apache.ambari.server.controller.test;
 
+import junit.framework.Assert;
+
 import java.util.concurrent.Future;
 import java.util.concurrent.LinkedBlockingQueue;
 import java.util.concurrent.ThreadPoolExecutor;
@@ -26,12 +28,10 @@ import org.apache.ambari.server.controller.utilities.BufferedThreadPoolExecutorC
 import org.apache.ambari.server.controller.utilities.ScalingThreadPoolExecutor;
 import org.junit.Test;
 
-import junit.framework.Assert;
-
 public class BufferedThreadPoolExecutorCompletionServiceTest {
 
   private void longOp() throws InterruptedException {
-    Thread.sleep(3000);
+    Thread.sleep(1000);
     System.out.println("Completed " + Thread.currentThread());
   }
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/8425f1fa/ambari-server/src/test/java/org/apache/ambari/server/orm/TestOrmImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/orm/TestOrmImpl.java b/ambari-server/src/test/java/org/apache/ambari/server/orm/TestOrmImpl.java
index ce1c7e6..e72692a 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/orm/TestOrmImpl.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/orm/TestOrmImpl.java
@@ -18,14 +18,14 @@
 
 package org.apache.ambari.server.orm;
 
+import javax.persistence.EntityManager;
+import javax.persistence.RollbackException;
+
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Date;
 import java.util.List;
 
-import javax.persistence.EntityManager;
-import javax.persistence.RollbackException;
-
 import org.apache.ambari.server.Role;
 import org.apache.ambari.server.actionmanager.HostRoleStatus;
 import org.apache.ambari.server.api.services.AmbariMetaInfo;
@@ -48,8 +48,11 @@ import org.apache.ambari.server.orm.entities.StackEntity;
 import org.apache.ambari.server.orm.entities.StageEntity;
 import org.apache.ambari.server.security.authorization.ResourceType;
 import org.junit.After;
+import org.junit.AfterClass;
 import org.junit.Assert;
 import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Ignore;
 import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -57,28 +60,86 @@ import org.slf4j.LoggerFactory;
 import com.google.inject.Guice;
 import com.google.inject.Injector;
 import com.google.inject.persist.PersistService;
+import com.google.inject.persist.UnitOfWork;
 
 public class TestOrmImpl extends Assert {
   private static final Logger log = LoggerFactory.getLogger(TestOrmImpl.class);
 
   private static Injector injector;
-
-  @Before
-  public void setup() {
+  private static StackDAO stackDAO;
+  private static ResourceTypeDAO resourceTypeDAO;
+  private static ClusterDAO clusterDAO;
+  private static OrmTestHelper ormTestHelper;
+  private static ClusterServiceDAO clusterServiceDAO;
+  private static HostRoleCommandDAO hostRoleCommandDAO;
+  private static HostDAO hostDAO;
+  private static StageDAO stageDAO;
+  private static EntityManager entityManager;
+  private static RequestDAO requestDAO;
+
+  @BeforeClass
+  public static void classSetUp() throws Exception {
     injector = Guice.createInjector(new InMemoryDefaultTestModule());
     injector.getInstance(GuiceJpaInitializer.class);
 
     // required to load stack information into the DB
     injector.getInstance(AmbariMetaInfo.class);
+    stackDAO = injector.getInstance(StackDAO.class);
+    resourceTypeDAO = injector.getInstance(ResourceTypeDAO.class);
+    clusterDAO = injector.getInstance(ClusterDAO.class);
+    ormTestHelper = injector.getInstance(OrmTestHelper.class);
+    clusterServiceDAO = injector.getInstance(ClusterServiceDAO.class);
+    hostRoleCommandDAO = injector.getInstance(HostRoleCommandDAO.class);
+    hostDAO = injector.getInstance(HostDAO.class);
+    stageDAO = injector.getInstance(StageDAO.class);
+    entityManager = injector.getInstance(EntityManager.class);
+    requestDAO = injector.getInstance(RequestDAO.class);
+
+    ormTestHelper.createDefaultData();
+    injector.getInstance(UnitOfWork.class).end();
+  }
 
-    injector.getInstance(OrmTestHelper.class).createDefaultData();
+  @Before
+  public void setup() {
+    injector.getInstance(UnitOfWork.class).begin();
   }
 
   @After
   public void teardown() {
+    injector.getInstance(UnitOfWork.class).end();
+    cleanup();
+  }
+
+  @AfterClass
+  public static void afterClass() throws Exception {
     injector.getInstance(PersistService.class).stop();
   }
 
+  private void cleanup() {
+
+
+    List<HostRoleCommandEntity> hostRoleCommandEntities = hostRoleCommandDAO.findAll();
+    if (hostRoleCommandEntities != null) {
+      for (HostRoleCommandEntity hostRoleCommandEntity : hostRoleCommandEntities) {
+        hostRoleCommandDAO.remove(hostRoleCommandEntity);
+      }
+    }
+
+    List<StageEntity> stageEntities = stageDAO.findAll();
+    if (stageEntities != null) {
+      for (StageEntity stageEntity : stageEntities) {
+        stageDAO.remove(stageEntity);
+      }
+    }
+
+    List<RequestEntity> requestEntities = requestDAO.findAll();
+    if (requestEntities != null) {
+      for (RequestEntity requestEntity : requestEntities) {
+        requestDAO.remove(requestEntity);
+      }
+    }
+  }
+
   /**
    * persistence provider is responsible for returning empty collection if relation doesn't exists
    */
@@ -86,8 +147,6 @@ public class TestOrmImpl extends Assert {
   public void testEmptyPersistentCollection() {
     String testClusterName = "test_cluster2";
 
-    StackDAO stackDAO = injector.getInstance(StackDAO.class);
-    ResourceTypeDAO resourceTypeDAO = injector.getInstance(ResourceTypeDAO.class);
 
     // create an admin resource to represent this cluster
     ResourceTypeEntity resourceTypeEntity = resourceTypeDAO.findById(ResourceType.CLUSTER.getId());
@@ -107,7 +166,6 @@ public class TestOrmImpl extends Assert {
     clusterEntity.setResource(resourceEntity);
     clusterEntity.setDesiredStack(stackEntity);
 
-    ClusterDAO clusterDAO = injector.getInstance(ClusterDAO.class);
     clusterDAO.create(clusterEntity);
     clusterEntity = clusterDAO.findByName(clusterEntity.getClusterName());
 
@@ -120,7 +178,7 @@ public class TestOrmImpl extends Assert {
    */
   @Test(expected = RollbackException.class)
   public void testRollbackException() throws Throwable{
-    injector.getInstance(OrmTestHelper.class).performTransactionMarkedForRollback();
+    ormTestHelper.performTransactionMarkedForRollback();
   }
 
   /**
@@ -130,14 +188,14 @@ public class TestOrmImpl extends Assert {
   public void testSafeRollback() {
     String testClusterName = "don't save";
 
-    EntityManager entityManager = injector.getInstance(OrmTestHelper.class).getEntityManager();
+    EntityManager entityManager = ormTestHelper.getEntityManager();
     entityManager.getTransaction().begin();
     ClusterEntity clusterEntity = new ClusterEntity();
     clusterEntity.setClusterName(testClusterName);
     entityManager.persist(clusterEntity);
     entityManager.getTransaction().rollback();
 
-    assertNull("transaction was not rolled back", injector.getInstance(ClusterDAO.class).findByName(testClusterName));
+    assertNull("transaction was not rolled back", clusterDAO.findByName(testClusterName));
   }
 
   /**
@@ -145,7 +203,6 @@ public class TestOrmImpl extends Assert {
    */
   @Test
   public void testAutoIncrementedField() {
-    ClusterServiceDAO clusterServiceDAO = injector.getInstance(ClusterServiceDAO.class);
     Date currentTime = new Date();
     String serviceName = "MapReduce1";
     String clusterName = "test_cluster1";
@@ -161,8 +218,6 @@ public class TestOrmImpl extends Assert {
   }
 
   private void createService(Date currentTime, String serviceName, String clusterName) {
-    ClusterServiceDAO clusterServiceDAO = injector.getInstance(ClusterServiceDAO.class);
-    ClusterDAO clusterDAO = injector.getInstance(ClusterDAO.class);
     ClusterEntity cluster = clusterDAO.findByName(clusterName);
 
     ClusterServiceEntity clusterServiceEntity = new ClusterServiceEntity();
@@ -185,7 +240,6 @@ public class TestOrmImpl extends Assert {
    */
   @Test
   public void testCascadeRemoveFail() {
-    ClusterServiceDAO clusterServiceDAO = injector.getInstance(ClusterServiceDAO.class);
     Date currentTime = new Date();
     String serviceName = "MapReduce2";
     String clusterName = "test_cluster1";
@@ -215,13 +269,12 @@ public class TestOrmImpl extends Assert {
     log.info("command '{}' - taskId '{}'", list.get(1).getRoleCommand(),
         list.get(1).getTaskId());
    assertTrue(list.get(0).getTaskId() < list.get(1).getTaskId());
+
   }
 
   @Test
   public void testFindHostsByStage() {
-    injector.getInstance(OrmTestHelper.class).createStageCommands();
-    HostDAO hostDAO = injector.getInstance(HostDAO.class);
-    StageDAO stageDAO = injector.getInstance(StageDAO.class);
+    ormTestHelper.createStageCommands();
     StageEntity stageEntity = stageDAO.findByActionId("1-1");
     log.info("StageEntity {} {}" + stageEntity.getRequestId() + " "
         + stageEntity.getStageId());
@@ -231,8 +284,7 @@ public class TestOrmImpl extends Assert {
 
   @Test
   public void testAbortHostRoleCommands() {
-    injector.getInstance(OrmTestHelper.class).createStageCommands();
-    HostRoleCommandDAO hostRoleCommandDAO = injector.getInstance(HostRoleCommandDAO.class);
+    ormTestHelper.createStageCommands();
     int result = hostRoleCommandDAO.updateStatusByRequestId(
         1L, HostRoleStatus.ABORTED, Arrays.asList(HostRoleStatus.QUEUED,
         HostRoleStatus.IN_PROGRESS, HostRoleStatus.PENDING));
@@ -249,17 +301,14 @@ public class TestOrmImpl extends Assert {
 
   @Test
   public void testFindStageByHostRole() {
-    injector.getInstance(OrmTestHelper.class).createStageCommands();
-    HostRoleCommandDAO hostRoleCommandDAO = injector.getInstance(HostRoleCommandDAO.class);
+    ormTestHelper.createStageCommands();
     List<HostRoleCommandEntity> list = hostRoleCommandDAO.findByHostRole("test_host1", 1L, 1L, Role.DATANODE.toString());
     assertEquals(1, list.size());
   }
 
   @Test
   public void testLastRequestId() {
-    injector.getInstance(OrmTestHelper.class).createStageCommands();
-    ClusterDAO clusterDAO = injector.getInstance(ClusterDAO.class);
-    StageDAO stageDAO = injector.getInstance(StageDAO.class);
+    ormTestHelper.createStageCommands();
     RequestDAO requestDAO = injector.getInstance(RequestDAO.class);
 
     RequestEntity requestEntity = requestDAO.findByPK(1L);
@@ -284,12 +333,9 @@ public class TestOrmImpl extends Assert {
     assertEquals(1L, stageDAO.getLastRequestId());
   }
 
+  @Ignore
   @Test
   public void testConcurrentModification() throws InterruptedException {
-    final StackDAO stackDAO = injector.getInstance(StackDAO.class);
-    final ClusterDAO clusterDAO = injector.getInstance(ClusterDAO.class);
-    final ResourceTypeDAO resourceTypeDAO = injector.getInstance(ResourceTypeDAO.class);
-
     final StackEntity stackEntity = stackDAO.find("HDP", "2.2.0");
 
     // create an admin resource to represent this cluster
@@ -323,14 +369,14 @@ public class TestOrmImpl extends Assert {
         clusterEntity1 = clusterDAO.findById(clusterEntity1.getClusterId());
         assertEquals("anotherName", clusterEntity1.getClusterName());
 
-        injector.getInstance(EntityManager.class).clear();
+        entityManager.clear();
       }
     };
 
     thread.start();
     thread.join();
 
-    injector.getInstance(EntityManager.class).clear();
+    entityManager.clear();
 
     clusterEntity = clusterDAO.findById(clusterEntity.getClusterId());
     assertEquals("anotherName", clusterEntity.getClusterName());
@@ -339,14 +385,14 @@ public class TestOrmImpl extends Assert {
       @Override
       public void run() {
         clusterDAO.removeByName("anotherName");
-        injector.getInstance(EntityManager.class).clear();
+        entityManager.clear();
       }
     };
 
     thread.start();
     thread.join();
 
-    injector.getInstance(EntityManager.class).clear();
+    entityManager.clear();
     assertNull(clusterDAO.findById(clusterEntity.getClusterId()));
 
     List<ClusterEntity> result = clusterDAO.findAll();

http://git-wip-us.apache.org/repos/asf/ambari/blob/8425f1fa/ambari-server/src/test/java/org/apache/ambari/server/orm/dao/AlertDispatchDAOTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/orm/dao/AlertDispatchDAOTest.java b/ambari-server/src/test/java/org/apache/ambari/server/orm/dao/AlertDispatchDAOTest.java
index 2518e0a..5697ba0 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/orm/dao/AlertDispatchDAOTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/orm/dao/AlertDispatchDAOTest.java
@@ -32,9 +32,7 @@ import java.util.List;
 import java.util.Set;
 import java.util.UUID;
 
-import com.google.inject.persist.UnitOfWork;
 import org.apache.ambari.server.AmbariException;
-import org.apache.ambari.server.configuration.Configuration;
 import org.apache.ambari.server.controller.AlertNoticeRequest;
 import org.apache.ambari.server.controller.internal.AlertNoticeResourceProvider;
 import org.apache.ambari.server.controller.internal.PageRequestImpl;
@@ -54,9 +52,12 @@ import org.apache.ambari.server.orm.entities.AlertGroupEntity;
 import org.apache.ambari.server.orm.entities.AlertHistoryEntity;
 import org.apache.ambari.server.orm.entities.AlertNoticeEntity;
 import org.apache.ambari.server.orm.entities.AlertTargetEntity;
+import org.apache.ambari.server.orm.entities.HostComponentDesiredStateEntity;
+import org.apache.ambari.server.orm.entities.HostComponentStateEntity;
 import org.apache.ambari.server.state.AlertState;
 import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.Clusters;
+import org.apache.ambari.server.state.Host;
 import org.apache.ambari.server.state.NotificationState;
 import org.apache.ambari.server.state.ServiceComponentFactory;
 import org.apache.ambari.server.state.ServiceComponentHostFactory;
@@ -65,7 +66,9 @@ import org.apache.ambari.server.state.alert.Scope;
 import org.apache.ambari.server.state.alert.SourceType;
 import org.apache.ambari.server.utils.EventBusSynchronizer;
 import org.junit.After;
+import org.junit.AfterClass;
 import org.junit.Before;
+import org.junit.BeforeClass;
 import org.junit.Test;
 
 import com.google.inject.Guice;
@@ -73,12 +76,6 @@ import com.google.inject.Injector;
 import com.google.inject.persist.PersistService;
 import com.google.inject.persist.UnitOfWork;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertNull;
-import static org.junit.Assert.assertTrue;
-
 /**
  * Tests {@link AlertDispatchDAO}.
  */
@@ -86,27 +83,25 @@ public class AlertDispatchDAOTest {
 
   private final static String HOSTNAME = "c6401.ambari.apache.org";
 
-  private Clusters m_clusters;
-  private Cluster m_cluster;
-  private Injector m_injector;
-  private AlertDispatchDAO m_dao;
-  private AlertDefinitionDAO m_definitionDao;
-  private AlertsDAO m_alertsDao;
-  private OrmTestHelper m_helper;
-
-  private ServiceFactory m_serviceFactory;
-  private ServiceComponentFactory m_componentFactory;
-  private ServiceComponentHostFactory m_schFactory;
-  private AlertDaoHelper m_alertHelper;
-
-  /**
-   *
-   */
-  @Before
-  public void setup() throws Exception {
+  private static Clusters m_clusters;
+  private static Cluster m_cluster;
+  private static Injector m_injector;
+  private static AlertDispatchDAO m_dao;
+  private static AlertDefinitionDAO m_definitionDao;
+  private static AlertsDAO m_alertsDao;
+  private static OrmTestHelper m_helper;
+  private static HostComponentDesiredStateDAO hostComponentDesiredStateDAO;
+  private static HostComponentStateDAO hostComponentStateDAO;
+
+  private static ServiceFactory m_serviceFactory;
+  private static ServiceComponentFactory m_componentFactory;
+  private static ServiceComponentHostFactory m_schFactory;
+  private static AlertDaoHelper m_alertHelper;
+
+  @BeforeClass
+  public static void classSetUp() throws Exception {
     m_injector = Guice.createInjector(new InMemoryDefaultTestModule());
     m_injector.getInstance(GuiceJpaInitializer.class);
-    m_injector.getInstance(UnitOfWork.class).begin();
 
     m_dao = m_injector.getInstance(AlertDispatchDAO.class);
     m_alertsDao = m_injector.getInstance(AlertsDAO.class);
@@ -117,13 +112,85 @@ public class AlertDispatchDAOTest {
     m_schFactory = m_injector.getInstance(ServiceComponentHostFactory.class);
     m_clusters = m_injector.getInstance(Clusters.class);
     m_alertHelper = m_injector.getInstance(AlertDaoHelper.class);
+    hostComponentDesiredStateDAO = m_injector.getInstance(HostComponentDesiredStateDAO.class);
+    hostComponentStateDAO = m_injector.getInstance(HostComponentStateDAO.class);
 
     // !!! need a synchronous op for testing
     EventBusSynchronizer.synchronizeAmbariEventPublisher(m_injector);
 
     m_cluster = m_clusters.getClusterById(m_helper.createCluster());
     m_helper.initializeClusterWithStack(m_cluster);
+  }
+
+  /**
+   *
+   */
+  @Before
+  public void setup() throws Exception {
+    m_injector.getInstance(UnitOfWork.class).begin();
+  }
+
+  /**
+   * @throws Exception
+   */
+  @After
+  public void teardown() throws Exception {
+    m_injector.getInstance(UnitOfWork.class).end();
+    cleanup();
+  }
+
+  @AfterClass
+  public static void afterClass() throws Exception {
+    m_injector.getInstance(PersistService.class).stop();
+  }
+
+  private void cleanup() throws AmbariException {
+    List<AlertGroupEntity> alertGroupEntities = m_dao.findAllGroups();
+    if (alertGroupEntities != null) {
+      for (AlertGroupEntity alertGroupEntity : alertGroupEntities) {
+        m_dao.remove(alertGroupEntity);
+      }
+    }
 
+    List<AlertTargetEntity> alertTargetEntities = m_dao.findAllTargets();
+    if (alertTargetEntities != null) {
+      for (AlertTargetEntity alertTargetEntity : alertTargetEntities) {
+        m_dao.remove(alertTargetEntity);
+      }
+    }
+
+    List<HostComponentDesiredStateEntity> hostComponentDesiredStateEntities = hostComponentDesiredStateDAO.findAll();
+    if (hostComponentDesiredStateEntities != null) {
+      for (HostComponentDesiredStateEntity hcdse : hostComponentDesiredStateEntities) {
+        hostComponentDesiredStateDAO.remove(hcdse);
+      }
+    }
+
+    List<HostComponentStateEntity> hostComponentStateEntities = hostComponentStateDAO.findAll();
+    if (hostComponentStateEntities != null) {
+      for (HostComponentStateEntity hcse : hostComponentStateEntities) {
+        hostComponentStateDAO.remove(hcse);
+      }
+    }
+
+    List<AlertDefinitionEntity> alertDefinitionEntities = m_definitionDao.findAll();
+    if (alertDefinitionEntities != null) {
+      for (AlertDefinitionEntity alertDefinitionEntity : alertDefinitionEntities) {
+        m_definitionDao.remove(alertDefinitionEntity);
+      }
+    }
+
+    m_cluster.deleteAllServices();
+
+    List<Host> hosts = m_clusters.getHosts();
+    if (hosts != null) {
+      for (Host host : hosts) {
+        m_clusters.deleteHost(host.getHostName());
+      }
+    }
+  }
+
+  private void initTestData() throws Exception {
     Set<AlertTargetEntity> targets = createTargets();
 
     for (int i = 0; i < 10; i++) {
@@ -140,20 +207,11 @@ public class AlertDispatchDAOTest {
   }
 
   /**
-   * @throws Exception
-   */
-  @After
-  public void teardown() throws Exception {
-    m_injector.getInstance(UnitOfWork.class).end();
-    m_injector.getInstance(PersistService.class).stop();
-    m_injector = null;
-  }
-
-  /**
    *
    */
   @Test
   public void testFindAllTargets() throws Exception {
+    initTestData();
     List<AlertTargetEntity> targets = m_dao.findAllTargets();
     assertNotNull(targets);
     assertEquals(5, targets.size());
@@ -180,17 +238,21 @@ public class AlertDispatchDAOTest {
    *
    */
   @Test
-  public void testFindAllGlobalTargets() throws Exception {
+  public void testCreateAndFindAllGlobalTargets() throws Exception {
     List<AlertTargetEntity> targets = m_dao.findAllGlobalTargets();
     assertNotNull(targets);
     assertEquals(0, targets.size());
 
-    m_helper.createGlobalAlertTarget();
+    AlertTargetEntity target  = m_helper.createGlobalAlertTarget();
     m_helper.createGlobalAlertTarget();
     m_helper.createGlobalAlertTarget();
 
     targets = m_dao.findAllGlobalTargets();
+    assertTrue(target.isGlobal());
     assertEquals(3, targets.size());
+
+    m_dao.findTargetByName(target.getTargetName());
+    assertTrue( target.isGlobal() );
   }
 
   /**
@@ -198,6 +260,7 @@ public class AlertDispatchDAOTest {
    */
   @Test
   public void testFindTargetByName() throws Exception {
+    initTestData();
     List<AlertTargetEntity> targets = m_dao.findAllTargets();
     assertNotNull(targets);
     AlertTargetEntity target = targets.get(3);
@@ -211,6 +274,7 @@ public class AlertDispatchDAOTest {
    */
   @Test
   public void testFindAllGroups() throws Exception {
+    initTestData();
     List<AlertGroupEntity> groups = m_dao.findAllGroups();
     assertNotNull(groups);
     assertEquals(10, groups.size());
@@ -221,6 +285,7 @@ public class AlertDispatchDAOTest {
    */
   @Test
   public void testFindGroupByName() throws Exception {
+    initTestData();
     List<AlertGroupEntity> groups = m_dao.findAllGroups();
     assertNotNull(groups);
     AlertGroupEntity group = groups.get(3);
@@ -236,6 +301,7 @@ public class AlertDispatchDAOTest {
    */
   @Test
   public void testFindGroupsByIds() throws Exception {
+    initTestData();
     List<AlertGroupEntity> groups = m_dao.findAllGroups();
     assertNotNull(groups);
     assertEquals(10, groups.size());
@@ -269,43 +335,7 @@ public class AlertDispatchDAOTest {
     assertEquals(group.getAlertDefinitions(), actual.getAlertDefinitions());
   }
 
-  /**
-   *
-   */
-  @Test
-  public void testGroupDefinitions() throws Exception {
-    List<AlertDefinitionEntity> definitions = createDefinitions();
 
-    AlertGroupEntity group = m_helper.createAlertGroup(
-        m_cluster.getClusterId(), null);
-
-    group = m_dao.findGroupById(group.getGroupId());
-    assertNotNull(group);
-
-    for (AlertDefinitionEntity definition : definitions) {
-      group.addAlertDefinition(definition);
-    }
-
-    m_dao.merge(group);
-
-    group = m_dao.findGroupByName(group.getGroupName());
-    assertEquals(definitions.size(), group.getAlertDefinitions().size());
-
-    for (AlertDefinitionEntity definition : definitions) {
-      assertTrue(group.getAlertDefinitions().contains(definition));
-    }
-
-    m_definitionDao.refresh(definitions.get(0));
-    m_definitionDao.remove(definitions.get(0));
-    definitions.remove(0);
-
-    group = m_dao.findGroupByName(group.getGroupName());
-    assertEquals(definitions.size(), group.getAlertDefinitions().size());
-
-    for (AlertDefinitionEntity definition : definitions) {
-      assertTrue(group.getAlertDefinitions().contains(definition));
-    }
-  }
 
   /**
    *
@@ -343,18 +373,6 @@ public class AlertDispatchDAOTest {
    *
    */
   @Test
-  public void testCreateGlobalTarget() throws Exception {
-    AlertTargetEntity target = m_helper.createGlobalAlertTarget();
-    assertTrue( target.isGlobal() );
-
-    target = m_dao.findTargetByName(target.getTargetName());
-    assertTrue( target.isGlobal() );
-  }
-
-  /**
-   *
-   */
-  @Test
   public void testGlobalTargetAssociations() throws Exception {
     AlertGroupEntity group = m_helper.createAlertGroup(
         m_cluster.getClusterId(), null);
@@ -598,6 +616,7 @@ public class AlertDispatchDAOTest {
    */
   @Test
   public void testFindTargetsViaGroupsByDefinition() throws Exception {
+    initTestData();
     List<AlertDefinitionEntity> definitions = createDefinitions();
     AlertGroupEntity group = m_helper.createAlertGroup(
         m_cluster.getClusterId(), null);
@@ -849,6 +868,7 @@ public class AlertDispatchDAOTest {
    */
   @Test
   public void testFindDefaultGroup() throws Exception {
+    initTestData();
     List<AlertGroupEntity> groups = m_dao.findAllGroups();
     assertNotNull(groups);
     assertEquals(10, groups.size());
@@ -873,6 +893,7 @@ public class AlertDispatchDAOTest {
    */
   @Test
   public void testDefaultGroupAutomaticCreation() throws Exception {
+    initTestData();
     List<AlertGroupEntity> groups = m_dao.findAllGroups();
     assertNotNull(groups);
     assertEquals(10, groups.size());
@@ -917,6 +938,7 @@ public class AlertDispatchDAOTest {
    */
   @Test(expected = AmbariException.class)
   public void testDefaultGroupInvalidServiceNoCreation() throws Exception {
+    initTestData();
     List<AlertGroupEntity> groups = m_dao.findAllGroups();
     assertNotNull(groups);
     assertEquals(10, groups.size());
@@ -947,6 +969,7 @@ public class AlertDispatchDAOTest {
     } finally {
       // assert no group was added
       assertEquals(12, m_dao.findAllGroups().size());
+      teardown();
     }
   }
 
@@ -999,4 +1022,42 @@ public class AlertDispatchDAOTest {
 
     return targets;
   }
+
+  /**
+   *
+   */
+  @Test
+  public void testGroupDefinitions() throws Exception {
+    List<AlertDefinitionEntity> definitions = createDefinitions();
+
+    AlertGroupEntity group = m_helper.createAlertGroup(
+            m_cluster.getClusterId(), null);
+
+    group = m_dao.findGroupById(group.getGroupId());
+    assertNotNull(group);
+
+    for (AlertDefinitionEntity definition : definitions) {
+      group.addAlertDefinition(definition);
+    }
+
+    m_dao.merge(group);
+
+    group = m_dao.findGroupByName(group.getGroupName());
+    assertEquals(definitions.size(), group.getAlertDefinitions().size());
+
+    for (AlertDefinitionEntity definition : definitions) {
+      assertTrue(group.getAlertDefinitions().contains(definition));
+    }
+
+    m_definitionDao.refresh(definitions.get(0));
+    m_definitionDao.remove(definitions.get(0));
+    definitions.remove(0);
+
+    group = m_dao.findGroupByName(group.getGroupName());
+    assertEquals(definitions.size(), group.getAlertDefinitions().size());
+
+    for (AlertDefinitionEntity definition : definitions) {
+      assertTrue(group.getAlertDefinitions().contains(definition));
+    }
+  }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/8425f1fa/ambari-server/src/test/java/org/apache/ambari/server/security/authorization/TestUsers.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/security/authorization/TestUsers.java b/ambari-server/src/test/java/org/apache/ambari/server/security/authorization/TestUsers.java
index 9b9fee5..a80cd03 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/security/authorization/TestUsers.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/security/authorization/TestUsers.java
@@ -43,6 +43,7 @@ import org.apache.ambari.server.orm.entities.ResourceEntity;
 import org.apache.ambari.server.orm.entities.ResourceTypeEntity;
 import org.apache.ambari.server.orm.entities.UserEntity;
 import org.junit.After;
+import org.junit.AfterClass;
 import org.junit.Before;
 import org.junit.BeforeClass;
 import org.junit.Ignore;
@@ -55,6 +56,7 @@ import org.springframework.security.crypto.password.PasswordEncoder;
 
 import com.google.inject.Guice;
 import com.google.inject.Injector;
+import com.google.inject.persist.PersistService;
 
 public class TestUsers {
   private static Injector injector;
@@ -129,6 +131,11 @@ public class TestUsers {
 
   }
 
+  @AfterClass
+  public static void afterClass() throws Exception {
+    injector.getInstance(PersistService.class).stop();
+  }
+
   private void cleanup() throws AmbariException {
     for (User user : users.getAllUsers()) {
       if (!user.getUserName().equals("admin")) {

http://git-wip-us.apache.org/repos/asf/ambari/blob/8425f1fa/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/ConfigureActionTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/ConfigureActionTest.java b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/ConfigureActionTest.java
index be0aeef..bf72e0c 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/ConfigureActionTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/ConfigureActionTest.java
@@ -17,11 +17,17 @@
  */
 package org.apache.ambari.server.serveraction.upgrades;
 
-import com.google.gson.Gson;
-import com.google.inject.Guice;
-import com.google.inject.Inject;
-import com.google.inject.Injector;
-import com.google.inject.persist.PersistService;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.ServiceNotFoundException;
 import org.apache.ambari.server.actionmanager.ExecutionCommandWrapper;
@@ -32,11 +38,14 @@ import org.apache.ambari.server.agent.ExecutionCommand;
 import org.apache.ambari.server.orm.GuiceJpaInitializer;
 import org.apache.ambari.server.orm.InMemoryDefaultTestModule;
 import org.apache.ambari.server.orm.OrmTestHelper;
+import org.apache.ambari.server.orm.dao.ClusterVersionDAO;
 import org.apache.ambari.server.orm.dao.HostDAO;
 import org.apache.ambari.server.orm.dao.HostVersionDAO;
 import org.apache.ambari.server.orm.dao.RepositoryVersionDAO;
 import org.apache.ambari.server.orm.dao.StackDAO;
+import org.apache.ambari.server.orm.entities.ClusterVersionEntity;
 import org.apache.ambari.server.orm.entities.HostVersionEntity;
+import org.apache.ambari.server.orm.entities.RepositoryVersionEntity;
 import org.apache.ambari.server.orm.entities.StackEntity;
 import org.apache.ambari.server.serveraction.ServerAction;
 import org.apache.ambari.server.state.Cluster;
@@ -57,19 +66,15 @@ import org.apache.ambari.server.state.stack.upgrade.PropertyKeyState;
 import org.apache.ambari.server.state.stack.upgrade.TransferCoercionType;
 import org.apache.ambari.server.state.stack.upgrade.TransferOperation;
 import org.junit.After;
+import org.junit.AfterClass;
 import org.junit.Before;
+import org.junit.BeforeClass;
 import org.junit.Test;
 
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertTrue;
+import com.google.gson.Gson;
+import com.google.inject.Guice;
+import com.google.inject.Injector;
+import com.google.inject.persist.PersistService;
 
 /**
  * Tests upgrade-related server side actions
@@ -81,48 +86,94 @@ public class ConfigureActionTest {
   private static final StackId HDP_211_STACK = new StackId("HDP-2.1.1");
   private static final StackId HDP_220_STACK = new StackId("HDP-2.2.0");
 
-  private Injector m_injector;
-
-  @Inject
-  private OrmTestHelper m_helper;
-
-  @Inject
-  private RepositoryVersionDAO repoVersionDAO;
-
-  @Inject
-  private HostVersionDAO hostVersionDAO;
-
-  @Inject
-  private HostRoleCommandFactory hostRoleCommandFactory;
-
-  @Inject
-  private ServiceFactory serviceFactory;
+  private static Injector m_injector;
+  private static OrmTestHelper m_helper;
+  private static RepositoryVersionDAO repoVersionDAO;
+  private static HostVersionDAO hostVersionDAO;
+  private static HostRoleCommandFactory hostRoleCommandFactory;
+  private static ServiceFactory serviceFactory;
+  private static ConfigHelper m_configHelper;
+  private static Clusters clusters;
+  private static ClusterVersionDAO clusterVersionDAO;
+  private static ConfigFactory cf;
+  private static ConfigureAction action;
+  private static HostDAO hostDAO;
+
+  @BeforeClass
+  public static void classSetUp() throws Exception {
+    m_injector = Guice.createInjector(new InMemoryDefaultTestModule());
+    m_injector.getInstance(GuiceJpaInitializer.class);
 
-  @Inject
-  ConfigHelper m_configHelper;
+    m_helper = m_injector.getInstance(OrmTestHelper.class);
+    repoVersionDAO = m_injector.getInstance(RepositoryVersionDAO.class);
+    hostVersionDAO = m_injector.getInstance(HostVersionDAO.class);
+    hostRoleCommandFactory = m_injector.getInstance(HostRoleCommandFactory.class);
+    serviceFactory = m_injector.getInstance(ServiceFactory.class);
+    m_configHelper = m_injector.getInstance(ConfigHelper.class);
+    clusters = m_injector.getInstance(Clusters.class);
+    clusterVersionDAO = m_injector.getInstance(ClusterVersionDAO.class);
+    cf = m_injector.getInstance(ConfigFactory.class);
+    action = m_injector.getInstance(ConfigureAction.class);
+    hostDAO = m_injector.getInstance(HostDAO.class);
+  }
 
   @Before
   public void setup() throws Exception {
-    m_injector = Guice.createInjector(new InMemoryDefaultTestModule());
-    m_injector.getInstance(GuiceJpaInitializer.class);
-    m_injector.injectMembers(this);
+
   }
 
   @After
   public void teardown() throws Exception {
+    cleanup();
+  }
+
+  @AfterClass
+  public static void afterClass() throws Exception {
     m_injector.getInstance(PersistService.class).stop();
   }
 
+  private void cleanup() throws AmbariException {
+    Map<String, Cluster> clusterMap = clusters.getClusters();
+
+    List<ClusterVersionEntity> clusterVersionEntities = clusterVersionDAO.findAll();
+    if (clusterVersionEntities != null) {
+      for (ClusterVersionEntity cve : clusterVersionEntities) {
+        clusterVersionDAO.remove(cve);
+      }
+    }
+
+    List<RepositoryVersionEntity> repositoryVersionEntities = repoVersionDAO.findAll();
+    if (repositoryVersionEntities != null) {
+      for (RepositoryVersionEntity rve : repositoryVersionEntities) {
+        repoVersionDAO.remove(rve);
+      }
+    }
+
+    List<HostVersionEntity> hostVersionEntities = hostVersionDAO.findAll();
+    if (clusterVersionEntities != null) {
+      for (HostVersionEntity hve : hostVersionEntities) {
+        hostVersionDAO.remove(hve);
+      }
+    }
+
+    for (String clusterName : clusterMap.keySet()) {
+      clusters.deleteCluster(clusterName);
+    }
+
+    for (Host host : clusters.getHosts()) {
+      clusters.deleteHost(host.getHostName());
+    }
+  }
+
   @Test
   public void testConfigActionUpgradeAcrossStack() throws Exception {
     makeUpgradeCluster();
 
-    Cluster c = m_injector.getInstance(Clusters.class).getCluster("c1");
+    Cluster c = clusters.getCluster("c1");
     assertEquals(1, c.getConfigsByType("zoo.cfg").size());
 
     c.setCurrentStackVersion(HDP_211_STACK);
     c.setDesiredStackVersion(HDP_220_STACK);
-    ConfigFactory cf = m_injector.getInstance(ConfigFactory.class);
     Config config = cf.createNew(c, "zoo.cfg", new HashMap<String, String>() {{
           put("initLimit", "10");
         }}, new HashMap<String, Map<String,String>>());
@@ -156,7 +207,7 @@ public class ConfigureActionTest {
     hostRoleCommand.setExecutionCommandWrapper(new ExecutionCommandWrapper(
         executionCommand));
 
-    ConfigureAction action = m_injector.getInstance(ConfigureAction.class);
+
     action.setExecutionCommand(executionCommand);
     action.setHostRoleCommand(hostRoleCommand);
 
@@ -180,14 +231,13 @@ public class ConfigureActionTest {
   public void testDeletePreserveChanges() throws Exception {
     makeUpgradeCluster();
 
-    Cluster c = m_injector.getInstance(Clusters.class).getCluster("c1");
+    Cluster c = clusters.getCluster("c1");
     assertEquals(1, c.getConfigsByType("zoo.cfg").size());
 
     c.setDesiredStackVersion(HDP_220_STACK);
 
     // create a config for zoo.cfg with two values; one is a stack value and the
     // other is custom
-    ConfigFactory cf = m_injector.getInstance(ConfigFactory.class);
     Config config = cf.createNew(c, "zoo.cfg", new HashMap<String, String>() {
       {
         put("tickTime", "2000");
@@ -226,7 +276,7 @@ public class ConfigureActionTest {
     HostRoleCommand hostRoleCommand = hostRoleCommandFactory.create(null, null, null, null);
     hostRoleCommand.setExecutionCommandWrapper(new ExecutionCommandWrapper(executionCommand));
 
-    ConfigureAction action = m_injector.getInstance(ConfigureAction.class);
+
     action.setExecutionCommand(executionCommand);
     action.setHostRoleCommand(hostRoleCommand);
 
@@ -250,11 +300,10 @@ public class ConfigureActionTest {
   public void testConfigTransferCopy() throws Exception {
     makeUpgradeCluster();
 
-    Cluster c = m_injector.getInstance(Clusters.class).getCluster("c1");
+    Cluster c = clusters.getCluster("c1");
     assertEquals(1, c.getConfigsByType("zoo.cfg").size());
 
     c.setDesiredStackVersion(HDP_220_STACK);
-    ConfigFactory cf = m_injector.getInstance(ConfigFactory.class);
     Config config = cf.createNew(c, "zoo.cfg", new HashMap<String, String>() {{
           put("initLimit", "10");
           put("copyIt", "10");
@@ -332,7 +381,7 @@ public class ConfigureActionTest {
     hostRoleCommand.setExecutionCommandWrapper(new ExecutionCommandWrapper(
         executionCommand));
 
-    ConfigureAction action = m_injector.getInstance(ConfigureAction.class);
+
     action.setExecutionCommand(executionCommand);
     action.setHostRoleCommand(hostRoleCommand);
 
@@ -391,11 +440,10 @@ public class ConfigureActionTest {
   public void testCoerceValueOnCopy() throws Exception {
     makeUpgradeCluster();
 
-    Cluster c = m_injector.getInstance(Clusters.class).getCluster("c1");
+    Cluster c = clusters.getCluster("c1");
     assertEquals(1, c.getConfigsByType("zoo.cfg").size());
 
     c.setDesiredStackVersion(HDP_220_STACK);
-    ConfigFactory cf = m_injector.getInstance(ConfigFactory.class);
     Config config = cf.createNew(c, "zoo.cfg", new HashMap<String, String>() {
       {
         put("zoo.server.csv", "c6401,c6402,  c6403");
@@ -436,7 +484,7 @@ public class ConfigureActionTest {
 
     hostRoleCommand.setExecutionCommandWrapper(new ExecutionCommandWrapper(executionCommand));
 
-    ConfigureAction action = m_injector.getInstance(ConfigureAction.class);
+
     action.setExecutionCommand(executionCommand);
     action.setHostRoleCommand(hostRoleCommand);
 
@@ -458,11 +506,10 @@ public class ConfigureActionTest {
   public void testValueReplacement() throws Exception {
     makeUpgradeCluster();
 
-    Cluster c = m_injector.getInstance(Clusters.class).getCluster("c1");
+    Cluster c = clusters.getCluster("c1");
     assertEquals(1, c.getConfigsByType("zoo.cfg").size());
 
     c.setDesiredStackVersion(HDP_220_STACK);
-    ConfigFactory cf = m_injector.getInstance(ConfigFactory.class);
     Config config = cf.createNew(c, "zoo.cfg", new HashMap<String, String>() {
       {
         put("key_to_replace", "My New Cat");
@@ -508,7 +555,7 @@ public class ConfigureActionTest {
 
     hostRoleCommand.setExecutionCommandWrapper(new ExecutionCommandWrapper(executionCommand));
 
-    ConfigureAction action = m_injector.getInstance(ConfigureAction.class);
+
     action.setExecutionCommand(executionCommand);
     action.setHostRoleCommand(hostRoleCommand);
 
@@ -534,11 +581,10 @@ public class ConfigureActionTest {
   public void testValueReplacementWithMissingConfigurations() throws Exception {
     makeUpgradeCluster();
 
-    Cluster c = m_injector.getInstance(Clusters.class).getCluster("c1");
+    Cluster c = clusters.getCluster("c1");
     assertEquals(1, c.getConfigsByType("zoo.cfg").size());
 
     c.setDesiredStackVersion(HDP_220_STACK);
-    ConfigFactory cf = m_injector.getInstance(ConfigFactory.class);
     Config config = cf.createNew(c, "zoo.cfg", new HashMap<String, String>() {
       {
         put("existing", "This exists!");
@@ -578,7 +624,7 @@ public class ConfigureActionTest {
 
     hostRoleCommand.setExecutionCommandWrapper(new ExecutionCommandWrapper(executionCommand));
 
-    ConfigureAction action = m_injector.getInstance(ConfigureAction.class);
+
     action.setExecutionCommand(executionCommand);
     action.setHostRoleCommand(hostRoleCommand);
 
@@ -595,12 +641,11 @@ public class ConfigureActionTest {
   public void testMultipleKeyValuesPerTask() throws Exception {
     makeUpgradeCluster();
 
-    Cluster c = m_injector.getInstance(Clusters.class).getCluster("c1");
+    Cluster c = clusters.getCluster("c1");
     assertEquals(1, c.getConfigsByType("zoo.cfg").size());
 
     c.setCurrentStackVersion(HDP_211_STACK);
     c.setDesiredStackVersion(HDP_220_STACK);
-    ConfigFactory cf = m_injector.getInstance(ConfigFactory.class);
     Config config = cf.createNew(c, "zoo.cfg", new HashMap<String, String>() {
       {
         put("fooKey", "barValue");
@@ -641,7 +686,7 @@ public class ConfigureActionTest {
     HostRoleCommand hostRoleCommand = hostRoleCommandFactory.create(null, null, null, null);
     hostRoleCommand.setExecutionCommandWrapper(new ExecutionCommandWrapper(executionCommand));
 
-    ConfigureAction action = m_injector.getInstance(ConfigureAction.class);
+
     action.setExecutionCommand(executionCommand);
     action.setHostRoleCommand(hostRoleCommand);
 
@@ -663,12 +708,11 @@ public class ConfigureActionTest {
   public void testAllowedSet() throws Exception {
     makeUpgradeCluster();
 
-    Cluster c = m_injector.getInstance(Clusters.class).getCluster("c1");
+    Cluster c = clusters.getCluster("c1");
     assertEquals(1, c.getConfigsByType("zoo.cfg").size());
 
     c.setCurrentStackVersion(HDP_211_STACK);
     c.setDesiredStackVersion(HDP_220_STACK);
-    ConfigFactory cf = m_injector.getInstance(ConfigFactory.class);
     Config config = cf.createNew(c, "zoo.cfg", new HashMap<String, String>() {
       {
         put("set.key.1", "s1");
@@ -736,7 +780,7 @@ public class ConfigureActionTest {
     HostRoleCommand hostRoleCommand = hostRoleCommandFactory.create(null, null, null, null);
     hostRoleCommand.setExecutionCommandWrapper(new ExecutionCommandWrapper(executionCommand));
 
-    ConfigureAction action = m_injector.getInstance(ConfigureAction.class);
+
     action.setExecutionCommand(executionCommand);
     action.setHostRoleCommand(hostRoleCommand);
 
@@ -762,12 +806,11 @@ public class ConfigureActionTest {
   public void testDisallowedSet() throws Exception {
     makeUpgradeCluster();
 
-    Cluster c = m_injector.getInstance(Clusters.class).getCluster("c1");
+    Cluster c = clusters.getCluster("c1");
     assertEquals(1, c.getConfigsByType("zoo.cfg").size());
 
     c.setCurrentStackVersion(HDP_211_STACK);
     c.setDesiredStackVersion(HDP_220_STACK);
-    ConfigFactory cf = m_injector.getInstance(ConfigFactory.class);
     Config config = cf.createNew(c, "zoo.cfg", new HashMap<String, String>() {
       {
         put("set.key.1", "s1");
@@ -825,7 +868,7 @@ public class ConfigureActionTest {
     HostRoleCommand hostRoleCommand = hostRoleCommandFactory.create(null, null, null, null);
     hostRoleCommand.setExecutionCommandWrapper(new ExecutionCommandWrapper(executionCommand));
 
-    ConfigureAction action = m_injector.getInstance(ConfigureAction.class);
+
     action.setExecutionCommand(executionCommand);
     action.setHostRoleCommand(hostRoleCommand);
 
@@ -849,12 +892,11 @@ public class ConfigureActionTest {
   public void testAllowedReplacment() throws Exception {
     makeUpgradeCluster();
 
-    Cluster c = m_injector.getInstance(Clusters.class).getCluster("c1");
+    Cluster c = clusters.getCluster("c1");
     assertEquals(1, c.getConfigsByType("zoo.cfg").size());
 
     c.setCurrentStackVersion(HDP_211_STACK);
     c.setDesiredStackVersion(HDP_220_STACK);
-    ConfigFactory cf = m_injector.getInstance(ConfigFactory.class);
     Config config = cf.createNew(c, "zoo.cfg", new HashMap<String, String>() {
       {
         put("replace.key.1", "r1");
@@ -924,7 +966,7 @@ public class ConfigureActionTest {
     HostRoleCommand hostRoleCommand = hostRoleCommandFactory.create(null, null, null, null);
     hostRoleCommand.setExecutionCommandWrapper(new ExecutionCommandWrapper(executionCommand));
 
-    ConfigureAction action = m_injector.getInstance(ConfigureAction.class);
+
     action.setExecutionCommand(executionCommand);
     action.setHostRoleCommand(hostRoleCommand);
 
@@ -946,12 +988,11 @@ public class ConfigureActionTest {
   public void testDisallowedReplacment() throws Exception {
     makeUpgradeCluster();
 
-    Cluster c = m_injector.getInstance(Clusters.class).getCluster("c1");
+    Cluster c = clusters.getCluster("c1");
     assertEquals(1, c.getConfigsByType("zoo.cfg").size());
 
     c.setCurrentStackVersion(HDP_211_STACK);
     c.setDesiredStackVersion(HDP_220_STACK);
-    ConfigFactory cf = m_injector.getInstance(ConfigFactory.class);
     Config config = cf.createNew(c, "zoo.cfg", new HashMap<String, String>() {
       {
         put("replace.key.1", "r1");
@@ -1016,7 +1057,7 @@ public class ConfigureActionTest {
     HostRoleCommand hostRoleCommand = hostRoleCommandFactory.create(null, null, null, null);
     hostRoleCommand.setExecutionCommandWrapper(new ExecutionCommandWrapper(executionCommand));
 
-    ConfigureAction action = m_injector.getInstance(ConfigureAction.class);
+
     action.setExecutionCommand(executionCommand);
     action.setHostRoleCommand(hostRoleCommand);
 
@@ -1038,11 +1079,10 @@ public class ConfigureActionTest {
   public void testAllowedTransferCopy() throws Exception {
     makeUpgradeCluster();
 
-    Cluster c = m_injector.getInstance(Clusters.class).getCluster("c1");
+    Cluster c = clusters.getCluster("c1");
     assertEquals(1, c.getConfigsByType("zoo.cfg").size());
 
     c.setDesiredStackVersion(HDP_220_STACK);
-    ConfigFactory cf = m_injector.getInstance(ConfigFactory.class);
     Config config = cf.createNew(c, "zoo.cfg", new HashMap<String, String>() {{
           put("initLimit", "10");
           put("copy.key.1", "c1");
@@ -1126,7 +1166,7 @@ public class ConfigureActionTest {
     hostRoleCommand.setExecutionCommandWrapper(new ExecutionCommandWrapper(
         executionCommand));
 
-    ConfigureAction action = m_injector.getInstance(ConfigureAction.class);
+
     action.setExecutionCommand(executionCommand);
     action.setHostRoleCommand(hostRoleCommand);
 
@@ -1155,11 +1195,10 @@ public class ConfigureActionTest {
   public void testDisallowedTransferCopy() throws Exception {
     makeUpgradeCluster();
 
-    Cluster c = m_injector.getInstance(Clusters.class).getCluster("c1");
+    Cluster c = clusters.getCluster("c1");
     assertEquals(1, c.getConfigsByType("zoo.cfg").size());
 
     c.setDesiredStackVersion(HDP_220_STACK);
-    ConfigFactory cf = m_injector.getInstance(ConfigFactory.class);
     Config config = cf.createNew(c, "zoo.cfg", new HashMap<String, String>() {{
           put("initLimit", "10");
           put("copy.key.1", "c1");
@@ -1228,7 +1267,7 @@ public class ConfigureActionTest {
     hostRoleCommand.setExecutionCommandWrapper(new ExecutionCommandWrapper(
         executionCommand));
 
-    ConfigureAction action = m_injector.getInstance(ConfigureAction.class);
+
     action.setExecutionCommand(executionCommand);
     action.setHostRoleCommand(hostRoleCommand);
 
@@ -1252,11 +1291,10 @@ public class ConfigureActionTest {
   public void testAllowedTransferMove() throws Exception {
     makeUpgradeCluster();
 
-    Cluster c = m_injector.getInstance(Clusters.class).getCluster("c1");
+    Cluster c = clusters.getCluster("c1");
     assertEquals(1, c.getConfigsByType("zoo.cfg").size());
 
     c.setDesiredStackVersion(HDP_220_STACK);
-    ConfigFactory cf = m_injector.getInstance(ConfigFactory.class);
     Config config = cf.createNew(c, "zoo.cfg", new HashMap<String, String>() {{
           put("initLimit", "10");
           put("move.key.1", "m1");
@@ -1332,7 +1370,7 @@ public class ConfigureActionTest {
     hostRoleCommand.setExecutionCommandWrapper(new ExecutionCommandWrapper(
         executionCommand));
 
-    ConfigureAction action = m_injector.getInstance(ConfigureAction.class);
+
     action.setExecutionCommand(executionCommand);
     action.setHostRoleCommand(hostRoleCommand);
 
@@ -1362,11 +1400,10 @@ public class ConfigureActionTest {
   public void testDisallowedTransferMove() throws Exception {
     makeUpgradeCluster();
 
-    Cluster c = m_injector.getInstance(Clusters.class).getCluster("c1");
+    Cluster c = clusters.getCluster("c1");
     assertEquals(1, c.getConfigsByType("zoo.cfg").size());
 
     c.setDesiredStackVersion(HDP_220_STACK);
-    ConfigFactory cf = m_injector.getInstance(ConfigFactory.class);
     Config config = cf.createNew(c, "zoo.cfg", new HashMap<String, String>() {{
           put("initLimit", "10");
           put("move.key.1", "m1");
@@ -1436,7 +1473,7 @@ public class ConfigureActionTest {
     hostRoleCommand.setExecutionCommandWrapper(new ExecutionCommandWrapper(
         executionCommand));
 
-    ConfigureAction action = m_injector.getInstance(ConfigureAction.class);
+
     action.setExecutionCommand(executionCommand);
     action.setHostRoleCommand(hostRoleCommand);
 
@@ -1467,11 +1504,10 @@ public class ConfigureActionTest {
   public void testAllowedTransferDelete() throws Exception {
     makeUpgradeCluster();
 
-    Cluster c = m_injector.getInstance(Clusters.class).getCluster("c1");
+    Cluster c = clusters.getCluster("c1");
     assertEquals(1, c.getConfigsByType("zoo.cfg").size());
 
     c.setDesiredStackVersion(HDP_220_STACK);
-    ConfigFactory cf = m_injector.getInstance(ConfigFactory.class);
     Config config = cf.createNew(c, "zoo.cfg", new HashMap<String, String>() {{
           put("initLimit", "10");
           put("delete.key.1", "d1");
@@ -1543,7 +1579,7 @@ public class ConfigureActionTest {
     hostRoleCommand.setExecutionCommandWrapper(new ExecutionCommandWrapper(
         executionCommand));
 
-    ConfigureAction action = m_injector.getInstance(ConfigureAction.class);
+
     action.setExecutionCommand(executionCommand);
     action.setHostRoleCommand(hostRoleCommand);
 
@@ -1569,11 +1605,10 @@ public class ConfigureActionTest {
   public void testDisallowedTransferDelete() throws Exception {
     makeUpgradeCluster();
 
-    Cluster c = m_injector.getInstance(Clusters.class).getCluster("c1");
+    Cluster c = clusters.getCluster("c1");
     assertEquals(1, c.getConfigsByType("zoo.cfg").size());
 
     c.setDesiredStackVersion(HDP_220_STACK);
-    ConfigFactory cf = m_injector.getInstance(ConfigFactory.class);
     Config config = cf.createNew(c, "zoo.cfg", new HashMap<String, String>() {{
           put("initLimit", "10");
           put("delete.key.1", "d1");
@@ -1640,7 +1675,7 @@ public class ConfigureActionTest {
     hostRoleCommand.setExecutionCommandWrapper(new ExecutionCommandWrapper(
         executionCommand));
 
-    ConfigureAction action = m_injector.getInstance(ConfigureAction.class);
+
     action.setExecutionCommand(executionCommand);
     action.setHostRoleCommand(hostRoleCommand);
 
@@ -1666,7 +1701,6 @@ public class ConfigureActionTest {
     String clusterName = "c1";
     String hostName = "h1";
 
-    Clusters clusters = m_injector.getInstance(Clusters.class);
     clusters.addCluster(clusterName, HDP_220_STACK);
 
     StackDAO stackDAO = m_injector.getInstance(StackDAO.class);
@@ -1682,7 +1716,6 @@ public class ConfigureActionTest {
     // service properties will not run!
     installService(c, "ZOOKEEPER");
 
-    ConfigFactory cf = m_injector.getInstance(ConfigFactory.class);
     Config config = cf.createNew(c, "zoo.cfg", new HashMap<String, String>() {
       {
         put("initLimit", "10");
@@ -1723,7 +1756,6 @@ public class ConfigureActionTest {
     c.mapHostVersions(Collections.singleton(hostName), c.getCurrentClusterVersion(),
         RepositoryVersionState.CURRENT);
 
-    HostDAO hostDAO = m_injector.getInstance(HostDAO.class);
 
     HostVersionEntity entity = new HostVersionEntity();
     entity.setHostEntity(hostDAO.findByName(hostName));

http://git-wip-us.apache.org/repos/asf/ambari/blob/8425f1fa/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/UpgradeActionTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/UpgradeActionTest.java b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/UpgradeActionTest.java
index cb63b0f..f1013c2 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/UpgradeActionTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/UpgradeActionTest.java
@@ -47,6 +47,8 @@ import org.apache.ambari.server.orm.GuiceJpaInitializer;
 import org.apache.ambari.server.orm.InMemoryDefaultTestModule;
 import org.apache.ambari.server.orm.OrmTestHelper;
 import org.apache.ambari.server.orm.dao.ClusterVersionDAO;
+import org.apache.ambari.server.orm.dao.HostComponentDesiredStateDAO;
+import org.apache.ambari.server.orm.dao.HostComponentStateDAO;
 import org.apache.ambari.server.orm.dao.HostDAO;
 import org.apache.ambari.server.orm.dao.HostVersionDAO;
 import org.apache.ambari.server.orm.dao.RepositoryVersionDAO;
@@ -55,6 +57,8 @@ import org.apache.ambari.server.orm.dao.ServiceComponentDesiredStateDAO;
 import org.apache.ambari.server.orm.dao.StackDAO;
 import org.apache.ambari.server.orm.dao.UpgradeDAO;
 import org.apache.ambari.server.orm.entities.ClusterVersionEntity;
+import org.apache.ambari.server.orm.entities.HostComponentDesiredStateEntity;
+import org.apache.ambari.server.orm.entities.HostComponentStateEntity;
 import org.apache.ambari.server.orm.entities.HostVersionEntity;
 import org.apache.ambari.server.orm.entities.RepositoryVersionEntity;
 import org.apache.ambari.server.orm.entities.RequestEntity;
@@ -81,10 +85,10 @@ import org.apache.ambari.server.state.stack.UpgradePack;
 import org.apache.ambari.server.state.stack.upgrade.Direction;
 import org.apache.ambari.server.state.stack.upgrade.UpgradeType;
 import org.junit.After;
+import org.junit.AfterClass;
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.BeforeClass;
-import org.junit.Ignore;
 import org.junit.Test;
 
 import com.google.gson.Gson;
@@ -92,7 +96,6 @@ import com.google.gson.JsonArray;
 import com.google.gson.JsonElement;
 import com.google.gson.JsonObject;
 import com.google.inject.Guice;
-import com.google.inject.Inject;
 import com.google.inject.Injector;
 import com.google.inject.persist.PersistService;
 import com.google.inject.persist.UnitOfWork;
@@ -129,13 +132,17 @@ public class UpgradeActionTest {
   private static RequestDAO requestDAO;
   private static UpgradeDAO upgradeDAO;
   private static ServiceComponentDesiredStateDAO serviceComponentDesiredStateDAO;
+  private static HostComponentDesiredStateDAO hostComponentDesiredStateDAO;
+  private static HostComponentStateDAO hostComponentStateDAO;
+  private static StackDAO stackDAO;
+  private static AmbariMetaInfo ambariMetaInfo;
+  private static FinalizeUpgradeAction finalizeUpgradeAction;
 
   @BeforeClass
   public static void classSetUp() throws NoSuchFieldException, IllegalAccessException {
     m_injector = Guice.createInjector(new InMemoryDefaultTestModule());
     m_injector.getInstance(GuiceJpaInitializer.class);
 
-    amc = m_injector.getInstance(AmbariManagementController.class);
     m_helper = m_injector.getInstance(OrmTestHelper.class);
     repoVersionDAO = m_injector.getInstance(RepositoryVersionDAO.class);
     clusters = m_injector.getInstance(Clusters.class);
@@ -149,22 +156,35 @@ public class UpgradeActionTest {
     requestDAO = m_injector.getInstance(RequestDAO.class);
     upgradeDAO = m_injector.getInstance(UpgradeDAO.class);
     serviceComponentDesiredStateDAO = m_injector.getInstance(ServiceComponentDesiredStateDAO.class);
-
-    m_injector.getInstance(UnitOfWork.class).begin();
-
-    // Initialize AmbariManagementController
+    hostComponentDesiredStateDAO = m_injector.getInstance(HostComponentDesiredStateDAO.class);
+    hostComponentStateDAO = m_injector.getInstance(HostComponentStateDAO.class);
     amc = m_injector.getInstance(AmbariManagementController.class);
+    stackDAO = m_injector.getInstance(StackDAO.class);
+    ambariMetaInfo = m_injector.getInstance(AmbariMetaInfo.class);
+    finalizeUpgradeAction = m_injector.getInstance(FinalizeUpgradeAction.class);
 
     Field field = AmbariServer.class.getDeclaredField("clusterController");
     field.setAccessible(true);
     field.set(null, amc);
   }
 
+  @Before
+  public void setup() throws Exception {
+    cleanup();
+    m_injector.getInstance(UnitOfWork.class).begin();
+  }
+
   @After
   public void teardown() throws Exception {
+    m_injector.getInstance(UnitOfWork.class).end();
     cleanup();
   }
 
+  @AfterClass
+  public static void afterClass() throws Exception {
+    m_injector.getInstance(PersistService.class).stop();
+  }
+
   private void cleanup() throws AmbariException {
     Map<String, Cluster> clusterMap = clusters.getClusters();
 
@@ -189,6 +209,20 @@ public class UpgradeActionTest {
       }
     }
 
+    List<HostComponentDesiredStateEntity> hostComponentDesiredStateEntities = hostComponentDesiredStateDAO.findAll();
+    if (hostComponentDesiredStateEntities != null) {
+      for (HostComponentDesiredStateEntity hcdse : hostComponentDesiredStateEntities) {
+        hostComponentDesiredStateDAO.remove(hcdse);
+      }
+    }
+
+    List<HostComponentStateEntity> hostComponentStateEntities = hostComponentStateDAO.findAll();
+    if (hostComponentStateEntities != null) {
+      for (HostComponentStateEntity hcse : hostComponentStateEntities) {
+        hostComponentStateDAO.remove(hcse);
+      }
+    }
+
     for (String clusterName : clusterMap.keySet()) {
       clusters.deleteCluster(clusterName);
     }
@@ -228,7 +262,7 @@ public class UpgradeActionTest {
     c.transitionClusterVersion(targetStack, targetRepo, RepositoryVersionState.INSTALLED);
 
     c.mapHostVersions(Collections.singleton(hostName), c.getCurrentClusterVersion(),
-        RepositoryVersionState.CURRENT);
+            RepositoryVersionState.CURRENT);
 
     HostVersionEntity entity = new HostVersionEntity();
     entity.setHostEntity(hostDAO.findByName(hostName));
@@ -287,7 +321,7 @@ public class UpgradeActionTest {
 
     // Create a host version for the mid repo in CURRENT
     c.mapHostVersions(Collections.singleton(hostName), c.getCurrentClusterVersion(),
-        RepositoryVersionState.CURRENT);
+            RepositoryVersionState.CURRENT);
 
     // Create a host version for the target repo in UPGRADED
     HostVersionEntity entityTarget = new HostVersionEntity();
@@ -300,10 +334,8 @@ public class UpgradeActionTest {
   private void makeUpgradeCluster(StackId sourceStack, String sourceRepo, StackId targetStack, String targetRepo) throws Exception {
     String hostName = "h1";
 
-    Clusters clusters = m_injector.getInstance(Clusters.class);
     clusters.addCluster(clusterName, sourceStack);
 
-    StackDAO stackDAO = m_injector.getInstance(StackDAO.class);
     StackEntity stackEntitySource = stackDAO.find(sourceStack.getStackName(), sourceStack.getStackVersion());
     StackEntity stackEntityTarget = stackDAO.find(targetStack.getStackName(), targetStack.getStackVersion());
     assertNotNull(stackEntitySource);
@@ -347,8 +379,8 @@ public class UpgradeActionTest {
 
     // Create the new repo version
     String urlInfo = "[{'repositories':["
-        + "{'Repositories/base_url':'http://foo1','Repositories/repo_name':'HDP','Repositories/repo_id':'" + targetStack.getStackId() + "'}"
-        + "], 'OperatingSystems/os_type':'redhat6'}]";
+            + "{'Repositories/base_url':'http://foo1','Repositories/repo_name':'HDP','Repositories/repo_id':'" + targetStack.getStackId() + "'}"
+            + "], 'OperatingSystems/os_type':'redhat6'}]";
 
     repoVersionDAO.create(stackEntityTarget, targetRepo, String.valueOf(System.currentTimeMillis()), urlInfo);
 
@@ -358,22 +390,22 @@ public class UpgradeActionTest {
     c.setCurrentStackVersion(targetStack);
 
     c.mapHostVersions(Collections.singleton(hostName), c.getCurrentClusterVersion(),
-        RepositoryVersionState.CURRENT);
+            RepositoryVersionState.CURRENT);
 
     // create a single host with the UPGRADED HostVersionEntity
     HostDAO hostDAO = m_injector.getInstance(HostDAO.class);
 
     RepositoryVersionEntity repositoryVersionEntity = repoVersionDAO.findByStackAndVersion(
-        targetStack, targetRepo);
+            targetStack, targetRepo);
 
     HostVersionEntity entity = new HostVersionEntity(hostDAO.findByName(hostName),
-        repositoryVersionEntity, RepositoryVersionState.INSTALLED);
+            repositoryVersionEntity, RepositoryVersionState.INSTALLED);
 
     hostVersionDAO.create(entity);
 
     // verify the UPGRADED host versions were created successfully
     List<HostVersionEntity> hostVersions = hostVersionDAO.findByClusterStackAndVersion(clusterName,
-        targetStack, targetRepo);
+            targetStack, targetRepo);
 
     assertEquals(1, hostVersions.size());
     assertEquals(RepositoryVersionState.INSTALLED, hostVersions.get(0).getState());
@@ -382,10 +414,8 @@ public class UpgradeActionTest {
   private void makeCrossStackUpgradeCluster(StackId sourceStack, String sourceRepo, StackId targetStack, String targetRepo) throws Exception {
     String hostName = "h1";
 
-    Clusters clusters = m_injector.getInstance(Clusters.class);
     clusters.addCluster(clusterName, sourceStack);
 
-    StackDAO stackDAO = m_injector.getInstance(StackDAO.class);
     StackEntity stackEntitySource = stackDAO.find(sourceStack.getStackName(), sourceStack.getStackVersion());
     StackEntity stackEntityTarget = stackDAO.find(targetStack.getStackName(), targetStack.getStackVersion());
 
@@ -415,8 +445,8 @@ public class UpgradeActionTest {
 
     // Create the new repo version
     String urlInfo = "[{'repositories':["
-        + "{'Repositories/base_url':'http://foo1','Repositories/repo_name':'HDP','Repositories/repo_id':'" + targetRepo + "'}"
-        + "], 'OperatingSystems/os_type':'redhat6'}]";
+            + "{'Repositories/base_url':'http://foo1','Repositories/repo_name':'HDP','Repositories/repo_id':'" + targetRepo + "'}"
+            + "], 'OperatingSystems/os_type':'redhat6'}]";
     repoVersionDAO.create(stackEntityTarget, targetRepo, String.valueOf(System.currentTimeMillis()), urlInfo);
 
     // Start upgrading the newer repo
@@ -424,7 +454,7 @@ public class UpgradeActionTest {
     c.transitionClusterVersion(targetStack, targetRepo, RepositoryVersionState.INSTALLED);
 
     c.mapHostVersions(Collections.singleton(hostName), c.getCurrentClusterVersion(),
-        RepositoryVersionState.CURRENT);
+            RepositoryVersionState.CURRENT);
 
     HostDAO hostDAO = m_injector.getInstance(HostDAO.class);
 
@@ -451,9 +481,7 @@ public class UpgradeActionTest {
     // That upgrade pack has changes for ZK and NameNode.
     String upgradePackName = "upgrade_nonrolling_new_stack";
 
-    AmbariMetaInfo metaInfo = m_injector.getInstance(AmbariMetaInfo.class);
-
-    Map<String, UpgradePack> packs = metaInfo.getUpgradePacks(sourceStack.getStackName(), sourceStack.getStackVersion());
+    Map<String, UpgradePack> packs = ambariMetaInfo.getUpgradePacks(sourceStack.getStackName(), sourceStack.getStackVersion());
     Assert.assertTrue(packs.containsKey(upgradePackName));
 
     makeCrossStackUpgradeCluster(sourceStack, sourceRepo, targetStack, targetRepo);
@@ -540,11 +568,10 @@ public class UpgradeActionTest {
     HostRoleCommand hostRoleCommand = hostRoleCommandFactory.create(null, null, null, null);
     hostRoleCommand.setExecutionCommandWrapper(new ExecutionCommandWrapper(executionCommand));
 
-    FinalizeUpgradeAction action = m_injector.getInstance(FinalizeUpgradeAction.class);
-    action.setExecutionCommand(executionCommand);
-    action.setHostRoleCommand(hostRoleCommand);
+    finalizeUpgradeAction.setExecutionCommand(executionCommand);
+    finalizeUpgradeAction.setHostRoleCommand(hostRoleCommand);
 
-    CommandReport report = action.execute(null);
+    CommandReport report = finalizeUpgradeAction.execute(null);
     assertNotNull(report);
     assertEquals(HostRoleStatus.COMPLETED.name(), report.getStatus());
 
@@ -599,11 +626,10 @@ public class UpgradeActionTest {
     HostRoleCommand hostRoleCommand = hostRoleCommandFactory.create(null, null, null, null);
     hostRoleCommand.setExecutionCommandWrapper(new ExecutionCommandWrapper(executionCommand));
 
-    FinalizeUpgradeAction action = m_injector.getInstance(FinalizeUpgradeAction.class);
-    action.setExecutionCommand(executionCommand);
-    action.setHostRoleCommand(hostRoleCommand);
+    finalizeUpgradeAction.setExecutionCommand(executionCommand);
+    finalizeUpgradeAction.setHostRoleCommand(hostRoleCommand);
 
-    CommandReport report = action.execute(null);
+    CommandReport report = finalizeUpgradeAction.execute(null);
     assertNotNull(report);
     assertEquals(HostRoleStatus.FAILED.name(), report.getStatus());
     assertTrue(report.getStdErr().contains(FinalizeUpgradeAction.PREVIOUS_UPGRADE_NOT_COMPLETED_MSG));
@@ -619,12 +645,11 @@ public class UpgradeActionTest {
     makeUpgradeCluster(sourceStack, sourceRepo, targetStack, targetRepo);
 
     // Verify the repo before calling Finalize
-    AmbariMetaInfo metaInfo = m_injector.getInstance(AmbariMetaInfo.class);
     AmbariCustomCommandExecutionHelper helper = m_injector.getInstance(AmbariCustomCommandExecutionHelper.class);
     Host host = clusters.getHost("h1");
     Cluster cluster = clusters.getCluster(clusterName);
 
-    RepositoryInfo repo = metaInfo.getRepository(sourceStack.getStackName(), sourceStack.getStackVersion(), "redhat6", sourceStack.getStackId());
+    RepositoryInfo repo = ambariMetaInfo.getRepository(sourceStack.getStackName(), sourceStack.getStackVersion(), "redhat6", sourceStack.getStackId());
     assertEquals(HDP_211_CENTOS6_REPO_URL, repo.getBaseUrl());
     verifyBaseRepoURL(helper, cluster, host, HDP_211_CENTOS6_REPO_URL);
 
@@ -640,11 +665,10 @@ public class UpgradeActionTest {
     HostRoleCommand hostRoleCommand = hostRoleCommandFactory.create(null, null, null, null);
     hostRoleCommand.setExecutionCommandWrapper(new ExecutionCommandWrapper(executionCommand));
 
-    FinalizeUpgradeAction action = m_injector.getInstance(FinalizeUpgradeAction.class);
-    action.setExecutionCommand(executionCommand);
-    action.setHostRoleCommand(hostRoleCommand);
+    finalizeUpgradeAction.setExecutionCommand(executionCommand);
+    finalizeUpgradeAction.setHostRoleCommand(hostRoleCommand);
 
-    CommandReport report = action.execute(null);
+    CommandReport report = finalizeUpgradeAction.execute(null);
     assertNotNull(report);
     assertEquals(HostRoleStatus.COMPLETED.name(), report.getStatus());
 
@@ -682,13 +706,12 @@ public class UpgradeActionTest {
     }
 
     // Verify the repo before calling Finalize
-    AmbariMetaInfo metaInfo = m_injector.getInstance(AmbariMetaInfo.class);
     AmbariCustomCommandExecutionHelper helper = m_injector.getInstance(AmbariCustomCommandExecutionHelper.class);
     Host host = clusters.getHost("h1");
     Cluster cluster = clusters.getCluster(clusterName);
 
-    RepositoryInfo repo = metaInfo.getRepository(sourceStack.getStackName(),
-        sourceStack.getStackVersion(), "redhat6", sourceStack.getStackId());
+    RepositoryInfo repo = ambariMetaInfo.getRepository(sourceStack.getStackName(),
+            sourceStack.getStackVersion(), "redhat6", sourceStack.getStackId());
     assertEquals(HDP_211_CENTOS6_REPO_URL, repo.getBaseUrl());
     verifyBaseRepoURL(helper, cluster, host, HDP_211_CENTOS6_REPO_URL);
 
@@ -704,11 +727,10 @@ public class UpgradeActionTest {
     HostRoleCommand hostRoleCommand = hostRoleCommandFactory.create(null, null, null, null);
     hostRoleCommand.setExecutionCommandWrapper(new ExecutionCommandWrapper(executionCommand));
 
-    FinalizeUpgradeAction action = m_injector.getInstance(FinalizeUpgradeAction.class);
-    action.setExecutionCommand(executionCommand);
-    action.setHostRoleCommand(hostRoleCommand);
+    finalizeUpgradeAction.setExecutionCommand(executionCommand);
+    finalizeUpgradeAction.setHostRoleCommand(hostRoleCommand);
 
-    CommandReport report = action.execute(null);
+    CommandReport report = finalizeUpgradeAction.execute(null);
     assertNotNull(report);
     assertEquals(HostRoleStatus.COMPLETED.name(), report.getStatus());
   }
@@ -755,11 +777,10 @@ public class UpgradeActionTest {
 
     hostRoleCommand.setExecutionCommandWrapper(new ExecutionCommandWrapper(executionCommand));
 
-    FinalizeUpgradeAction action = m_injector.getInstance(FinalizeUpgradeAction.class);
-    action.setExecutionCommand(executionCommand);
-    action.setHostRoleCommand(hostRoleCommand);
+    finalizeUpgradeAction.setExecutionCommand(executionCommand);
+    finalizeUpgradeAction.setHostRoleCommand(hostRoleCommand);
 
-    CommandReport report = action.execute(null);
+    CommandReport report = finalizeUpgradeAction.execute(null);
     assertNotNull(report);
     assertEquals(HostRoleStatus.COMPLETED.name(), report.getStatus());
 
@@ -833,11 +854,10 @@ public class UpgradeActionTest {
       assertTrue(hve.getState() == RepositoryVersionState.INSTALLED);
     }
 
-    FinalizeUpgradeAction action = m_injector.getInstance(FinalizeUpgradeAction.class);
-    action.setExecutionCommand(executionCommand);
-    action.setHostRoleCommand(hostRoleCommand);
+    finalizeUpgradeAction.setExecutionCommand(executionCommand);
+    finalizeUpgradeAction.setHostRoleCommand(hostRoleCommand);
 
-    CommandReport report = action.execute(null);
+    CommandReport report = finalizeUpgradeAction.execute(null);
     assertNotNull(report);
     assertEquals(HostRoleStatus.COMPLETED.name(), report.getStatus());
 
@@ -900,7 +920,7 @@ public class UpgradeActionTest {
     // inject an unhappy path where the cluster repo version is still UPGRADING
     // even though all of the hosts are UPGRADED
     ClusterVersionEntity upgradingClusterVersion = clusterVersionDAO.findByClusterAndStackAndVersion(
-        clusterName, HDP_22_STACK, targetRepo);
+            clusterName, HDP_22_STACK, targetRepo);
 
     upgradingClusterVersion.setState(RepositoryVersionState.INSTALLING);
     upgradingClusterVersion = clusterVersionDAO.merge(upgradingClusterVersion);
@@ -931,11 +951,10 @@ public class UpgradeActionTest {
 
     hostRoleCommand.setExecutionCommandWrapper(new ExecutionCommandWrapper(executionCommand));
 
-    FinalizeUpgradeAction action = m_injector.getInstance(FinalizeUpgradeAction.class);
-    action.setExecutionCommand(executionCommand);
-    action.setHostRoleCommand(hostRoleCommand);
+    finalizeUpgradeAction.setExecutionCommand(executionCommand);
+    finalizeUpgradeAction.setHostRoleCommand(hostRoleCommand);
 
-    CommandReport report = action.execute(null);
+    CommandReport report = finalizeUpgradeAction.execute(null);
     assertNotNull(report);
     assertEquals(HostRoleStatus.COMPLETED.name(), report.getStatus());
 
@@ -958,7 +977,6 @@ public class UpgradeActionTest {
     makeUpgradeCluster(sourceStack, sourceRepo, targetStack, targetRepo);
 
     // Verify the repo before calling Finalize
-    AmbariMetaInfo metaInfo = m_injector.getInstance(AmbariMetaInfo.class);
     AmbariCustomCommandExecutionHelper helper = m_injector.getInstance(AmbariCustomCommandExecutionHelper.class);
     Host host = clusters.getHost("h1");
     Cluster cluster = clusters.getCluster(clusterName);
@@ -997,12 +1015,12 @@ public class UpgradeActionTest {
 
     // verify that no history exists yet
     List<ServiceComponentHistoryEntity> historyEntites = serviceComponentDesiredStateDAO.findHistory(
-        cluster.getClusterId(), nnSCH.getServiceName(),
-        nnSCH.getServiceComponentName());
+            cluster.getClusterId(), nnSCH.getServiceName(),
+            nnSCH.getServiceComponentName());
 
     assertEquals(0, historyEntites.size());
 
-    RepositoryInfo repo = metaInfo.getRepository(sourceStack.getStackName(), sourceStack.getStackVersion(), "redhat6", sourceStack.getStackId());
+    RepositoryInfo repo = ambariMetaInfo.getRepository(sourceStack.getStackName(), sourceStack.getStackVersion(), "redhat6", sourceStack.getStackId());
     assertEquals(HDP_211_CENTOS6_REPO_URL, repo.getBaseUrl());
     verifyBaseRepoURL(helper, cluster, host, HDP_211_CENTOS6_REPO_URL);
 
@@ -1020,11 +1038,10 @@ public class UpgradeActionTest {
     HostRoleCommand hostRoleCommand = hostRoleCommandFactory.create(null, null, null, null);
     hostRoleCommand.setExecutionCommandWrapper(new ExecutionCommandWrapper(executionCommand));
 
-    FinalizeUpgradeAction action = m_injector.getInstance(FinalizeUpgradeAction.class);
-    action.setExecutionCommand(executionCommand);
-    action.setHostRoleCommand(hostRoleCommand);
+    finalizeUpgradeAction.setExecutionCommand(executionCommand);
+    finalizeUpgradeAction.setHostRoleCommand(hostRoleCommand);
 
-    CommandReport report = action.execute(null);
+    CommandReport report = finalizeUpgradeAction.execute(null);
     assertNotNull(report);
     assertEquals(HostRoleStatus.COMPLETED.name(), report.getStatus());
 
@@ -1033,7 +1050,7 @@ public class UpgradeActionTest {
 
     // ensure that history now exists
     historyEntites = serviceComponentDesiredStateDAO.findHistory(cluster.getClusterId(),
-        nnSCH.getServiceName(), nnSCH.getServiceComponentName());
+            nnSCH.getServiceName(), nnSCH.getServiceComponentName());
 
     assertEquals(1, historyEntites.size());
   }

http://git-wip-us.apache.org/repos/asf/ambari/blob/8425f1fa/ambari-server/src/test/java/org/apache/ambari/server/state/ServiceTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/ServiceTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/ServiceTest.java
index 79fe0e9..897e955 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/ServiceTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/ServiceTest.java
@@ -18,6 +18,7 @@
 
 package org.apache.ambari.server.state;
 
+import junit.framework.Assert;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
@@ -26,8 +27,6 @@ import static org.junit.Assert.fail;
 import java.util.HashMap;
 import java.util.Map;
 
-import junit.framework.Assert;
-
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.api.services.AmbariMetaInfo;
 import org.apache.ambari.server.controller.ServiceResponse;
@@ -36,7 +35,9 @@ import org.apache.ambari.server.orm.InMemoryDefaultTestModule;
 import org.apache.ambari.server.orm.dao.ClusterServiceDAO;
 import org.apache.ambari.server.orm.entities.ClusterServiceEntity;
 import org.junit.After;
+import org.junit.AfterClass;
 import org.junit.Before;
+import org.junit.BeforeClass;
 import org.junit.Test;
 
 import com.google.inject.Guice;
@@ -45,25 +46,25 @@ import com.google.inject.persist.PersistService;
 
 public class ServiceTest {
 
-  private Clusters clusters;
-  private Cluster cluster;
-  private String clusterName;
-  private Injector injector;
-  private ServiceFactory serviceFactory;
-  private ServiceComponentFactory serviceComponentFactory;
-  private ServiceComponentHostFactory serviceComponentHostFactory;
-  private AmbariMetaInfo metaInfo;
-
-  @Before
-  public void setup() throws Exception {
+  private static Clusters clusters;
+  private static Cluster cluster;
+  private static String clusterName;
+  private static Injector injector;
+  private static ServiceFactory serviceFactory;
+  private static ServiceComponentFactory serviceComponentFactory;
+  private static ServiceComponentHostFactory serviceComponentHostFactory;
+  private static AmbariMetaInfo metaInfo;
+
+  @BeforeClass
+  public static void classSetUp() throws Exception {
     injector = Guice.createInjector(new InMemoryDefaultTestModule());
     injector.getInstance(GuiceJpaInitializer.class);
     clusters = injector.getInstance(Clusters.class);
     serviceFactory = injector.getInstance(ServiceFactory.class);
     serviceComponentFactory = injector.getInstance(
-        ServiceComponentFactory.class);
+            ServiceComponentFactory.class);
     serviceComponentHostFactory = injector.getInstance(
-        ServiceComponentHostFactory.class);
+            ServiceComponentHostFactory.class);
     metaInfo = injector.getInstance(AmbariMetaInfo.class);
     clusterName = "foo";
     clusters.addCluster(clusterName, new StackId("HDP-0.1"));
@@ -71,11 +72,67 @@ public class ServiceTest {
     Assert.assertNotNull(cluster);
   }
 
+  @Before
+  public void setup() throws Exception {
+
+  }
+
   @After
   public void teardown() throws AmbariException {
+    //injector.getInstance(PersistService.class).stop();
+    cleanup();
+  }
+
+  @AfterClass
+  public static void afterClass() throws Exception {
     injector.getInstance(PersistService.class).stop();
   }
 
+  private void cleanup() throws AmbariException {
+    cluster.deleteAllServices();
+  }
+
+  @Test
+  public void testCanBeRemoved() throws Exception{
+    Service service = cluster.addService("HDFS");
+
+    for (State state : State.values()) {
+      service.setDesiredState(state);
+      // service does not have any components, so it can be removed,
+      // even if the service is in non-removable state.
+      org.junit.Assert.assertTrue(service.canBeRemoved());
+    }
+
+    ServiceComponent component = service.addServiceComponent("NAMENODE");
+
+    // component can be removed
+    component.setDesiredState(State.INSTALLED);
+
+    for (State state : State.values()) {
+      service.setDesiredState(state);
+      // should always be true if the sub component can be removed
+      org.junit.Assert.assertTrue(service.canBeRemoved());
+    }
+
+    // can remove a STARTED component, as whether a service can be removed
+    // is ultimately decided by whether its host components can be removed
+    component.setDesiredState(State.INSTALLED);
+    addHostToCluster("h1", service.getCluster().getClusterName());
+    ServiceComponentHost sch = serviceComponentHostFactory.createNew(component, "h1");
+    component.addServiceComponentHost(sch);
+    sch.setDesiredState(State.STARTED);
+    sch.setState(State.STARTED);
+
+    for (State state : State.values()) {
+      service.setDesiredState(state);
+      // should always be false if the sub component cannot be removed
+      org.junit.Assert.assertFalse(service.canBeRemoved());
+    }
+
+    sch.setDesiredState(State.INSTALLED);
+    sch.setState(State.INSTALLED);
+  }
+
   @Test
   public void testCreateService() throws AmbariException {
     String serviceName = "HDFS";
@@ -260,43 +317,6 @@ public class ServiceTest {
 
   }
 
-  @Test
-  public void testCanBeRemoved() throws Exception{
-    Service service = cluster.addService("HDFS");
-
-    for (State state : State.values()) {
-      service.setDesiredState(state);
-      // service does not have any components, so it can be removed,
-      // even if the service is in non-removable state.
-      org.junit.Assert.assertTrue(service.canBeRemoved());
-    }
-
-    ServiceComponent component = service.addServiceComponent("NAMENODE");
-
-    // component can be removed
-    component.setDesiredState(State.INSTALLED);
-
-    for (State state : State.values()) {
-      service.setDesiredState(state);
-      // should always be true if the sub component can be removed
-      org.junit.Assert.assertTrue(service.canBeRemoved());
-    }
-
-    // can remove a STARTED component as whether a service can be removed
-    // is ultimately decided based on if the host components can be removed
-    component.setDesiredState(State.INSTALLED);
-    addHostToCluster("h1", service.getCluster().getClusterName());
-    ServiceComponentHost sch = serviceComponentHostFactory.createNew(component, "h1");
-    component.addServiceComponentHost(sch);
-    sch.setDesiredState(State.STARTED);
-    sch.setState(State.STARTED);
-
-    for (State state : State.values()) {
-      service.setDesiredState(state);
-      // should always be false if the sub component can not be removed
-      org.junit.Assert.assertFalse(service.canBeRemoved());
-    }
-  }
 
   @Test
   public void testServiceMaintenance() throws Exception {