Posted to commits@ambari.apache.org by nc...@apache.org on 2017/02/13 21:04:10 UTC

[36/50] [abbrv] ambari git commit: AMBARI-19799. Optimize DB initialization for Ambari Server Unit Tests. Additional fixes 2. (mpapirkovskyy)

AMBARI-19799. Optimize DB initialization for Ambari Server Unit Tests. Additional fixes 2. (mpapirkovskyy)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/2d7276e2
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/2d7276e2
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/2d7276e2

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 2d7276e2d1338bc318273ab9ce13312966d1eb7f
Parents: 4b0c93e
Author: Myroslav Papirkovskyi <mp...@hortonworks.com>
Authored: Thu Feb 2 02:03:00 2017 +0200
Committer: Nate Cole <nc...@hortonworks.com>
Committed: Mon Feb 13 15:45:35 2017 -0500

----------------------------------------------------------------------
 .../server/controller/ControllerModule.java     | 18 ++--
 .../apache/ambari/server/H2DatabaseCleaner.java | 91 ++++++++++++++------
 .../ambari/server/orm/dao/CrudDAOTest.java      |  5 +-
 .../ambari/server/orm/dao/UpgradeDAOTest.java   | 16 ++--
 .../ComponentVersionCheckActionTest.java        | 29 +++++--
 .../upgrades/UpgradeActionTest.java             | 74 +++++++++++-----
 .../ambari/server/state/ConfigHelperTest.java   | 14 ++-
 .../state/cluster/AlertDataManagerTest.java     |  1 +
 pom.xml                                         |  4 +
 9 files changed, 175 insertions(+), 77 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/2d7276e2/ambari-server/src/main/java/org/apache/ambari/server/controller/ControllerModule.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/ControllerModule.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/ControllerModule.java
index b634a62..8646e51 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/ControllerModule.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/ControllerModule.java
@@ -214,14 +214,6 @@ public class ControllerModule extends AbstractModule {
     DatabaseType databaseType = configuration.getDatabaseType();
     LOG.info("Detected {} as the database type from the JDBC URL", databaseType);
 
-    // custom jdbc driver properties
-    Properties customDatabaseDriverProperties = configuration.getDatabaseCustomProperties();
-    properties.putAll(customDatabaseDriverProperties);
-
-    // custom persistence properties
-    Properties customPersistenceProperties = configuration.getPersistenceCustomProperties();
-    properties.putAll(customPersistenceProperties);
-
     switch (configuration.getPersistenceType()) {
       case IN_MEMORY:
         properties.setProperty(JDBC_URL, Configuration.JDBC_IN_MEMORY_URL);
@@ -229,7 +221,6 @@ public class ControllerModule extends AbstractModule {
         properties.setProperty(JDBC_USER, Configuration.JDBC_IN_MEMORY_USER);
         properties.setProperty(JDBC_PASSWORD, Configuration.JDBC_IN_MEMORY_PASSWORD);
         properties.setProperty(DDL_GENERATION, CREATE_ONLY);
-        properties.setProperty(DDL_GENERATION_MODE, DDL_BOTH_GENERATION);
         properties.setProperty(THROW_EXCEPTIONS, "true");
         break;
       case REMOTE:
@@ -242,6 +233,15 @@ public class ControllerModule extends AbstractModule {
         break;
     }
 
+    // allow the custom properties below to override the values set above
+    // custom jdbc driver properties
+    Properties customDatabaseDriverProperties = configuration.getDatabaseCustomProperties();
+    properties.putAll(customDatabaseDriverProperties);
+
+    // custom persistence properties
+    Properties customPersistenceProperties = configuration.getPersistenceCustomProperties();
+    properties.putAll(customPersistenceProperties);
+
     // determine the type of pool to use
     boolean isConnectionPoolingExternal = false;
     ConnectionPoolType connectionPoolType = configuration.getConnectionPoolType();

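Note: the hunk above moves the two putAll() calls below the switch because java.util.Properties.putAll() overwrites existing keys, so whichever call runs last wins. A minimal standalone sketch of that ordering behavior (the property key and values here are illustrative, not copied from Ambari's constants):

import java.util.Properties;

public class PropertyOverrideSketch {
  public static void main(String[] args) {
    Properties defaults = new Properties();
    defaults.setProperty("eclipselink.ddl-generation", "create-tables");

    Properties custom = new Properties();
    custom.setProperty("eclipselink.ddl-generation", "none");

    // Old order: custom applied first, then clobbered by the defaults.
    Properties oldOrder = new Properties();
    oldOrder.putAll(custom);
    oldOrder.putAll(defaults);
    System.out.println(oldOrder.getProperty("eclipselink.ddl-generation")); // create-tables

    // New order: defaults first, custom last, so the override survives.
    Properties newOrder = new Properties();
    newOrder.putAll(defaults);
    newOrder.putAll(custom);
    System.out.println(newOrder.getProperty("eclipselink.ddl-generation")); // none
  }
}
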
http://git-wip-us.apache.org/repos/asf/ambari/blob/2d7276e2/ambari-server/src/test/java/org/apache/ambari/server/H2DatabaseCleaner.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/H2DatabaseCleaner.java b/ambari-server/src/test/java/org/apache/ambari/server/H2DatabaseCleaner.java
index 91477eb..493e88b 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/H2DatabaseCleaner.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/H2DatabaseCleaner.java
@@ -26,6 +26,7 @@ import java.nio.file.Files;
 import java.nio.file.Paths;
 import java.sql.Connection;
 import java.sql.DriverManager;
+import java.sql.PreparedStatement;
 import java.sql.SQLException;
 import java.sql.Statement;
 import java.util.ArrayList;
@@ -39,13 +40,69 @@ import javax.persistence.metamodel.EntityType;
 import org.apache.ambari.server.configuration.Configuration;
 import org.apache.ambari.server.orm.DBAccessorImpl;
 import org.apache.commons.collections.CollectionUtils;
+import org.apache.commons.lang.StringUtils;
 
 import com.google.inject.Injector;
 import com.google.inject.persist.PersistService;
 
 public class H2DatabaseCleaner {
-  private static final String SEQ_INSERT_PREFIX = "INSERT INTO ambari_sequences";
-  private static List<String> seqInsertStatements;
+  private static final String SEQ_STATEMENT =
+      "INSERT INTO ambari_sequences(sequence_name, sequence_value) values (?, 0);";
+  private static List<String> sequenceList = new ArrayList<>();
+
+  static {
+    sequenceList.add("extension_id_seq");
+    sequenceList.add("resource_id_seq");
+    sequenceList.add("alert_target_id_seq");
+    sequenceList.add("topology_request_id_seq");
+    sequenceList.add("setting_id_seq");
+    sequenceList.add("principal_type_id_seq");
+    sequenceList.add("group_id_seq");
+    sequenceList.add("remote_cluster_id_seq");
+    sequenceList.add("privilege_id_seq");
+    sequenceList.add("servicecomponent_history_id_seq");
+    sequenceList.add("permission_id_seq");
+    sequenceList.add("principal_id_seq");
+    sequenceList.add("repo_version_id_seq");
+    sequenceList.add("cluster_version_id_seq");
+    sequenceList.add("topology_host_task_id_seq");
+    sequenceList.add("topology_logical_task_id_seq");
+    sequenceList.add("host_id_seq");
+    sequenceList.add("servicecomponentdesiredstate_id_seq");
+    sequenceList.add("configgroup_id_seq");
+    sequenceList.add("topology_host_group_id_seq");
+    sequenceList.add("upgrade_item_id_seq");
+    sequenceList.add("requestschedule_id_seq");
+    sequenceList.add("blueprint_setting_id_seq");
+    sequenceList.add("host_version_id_seq");
+    sequenceList.add("hostcomponentstate_id_seq");
+    sequenceList.add("cluster_id_seq");
+    sequenceList.add("view_instance_id_seq");
+    sequenceList.add("resourcefilter_id_seq");
+    sequenceList.add("alert_group_id_seq");
+    sequenceList.add("link_id_seq");
+    sequenceList.add("topology_host_info_id_seq");
+    sequenceList.add("viewentity_id_seq");
+    sequenceList.add("alert_notice_id_seq");
+    sequenceList.add("user_id_seq");
+    sequenceList.add("upgrade_id_seq");
+    sequenceList.add("stack_id_seq");
+    sequenceList.add("alert_current_id_seq");
+    sequenceList.add("widget_id_seq");
+    sequenceList.add("remote_cluster_service_id_seq");
+    sequenceList.add("alert_history_id_seq");
+    sequenceList.add("config_id_seq");
+    sequenceList.add("upgrade_group_id_seq");
+    sequenceList.add("member_id_seq");
+    sequenceList.add("service_config_id_seq");
+    sequenceList.add("widget_layout_id_seq");
+    sequenceList.add("hostcomponentdesiredstate_id_seq");
+    sequenceList.add("operation_level_id_seq");
+    sequenceList.add("servicecomponent_version_id_seq");
+    sequenceList.add("host_role_command_id_seq");
+    sequenceList.add("alert_definition_id_seq");
+    sequenceList.add("resource_type_id_seq");
+  }
 
   public static void clearDatabaseAndStopPersistenceService(Injector injector) throws AmbariException, SQLException {
     clearDatabase(injector.getProvider(EntityManager.class).get());
@@ -57,34 +114,20 @@ public class H2DatabaseCleaner {
       Configuration.JDBC_IN_MEMORY_USER, Configuration.JDBC_IN_MEMORY_PASSWORD);
   }
 
-  private static List<String> collectSequenceInserts() {
-    try {
-      ArrayList<String> statementList = new ArrayList<>();
-      for (String s : Files.readAllLines(Paths.get(DEFAULT_CREATE_JDBC_FILE_NAME), Charset.defaultCharset())) {
-        if (s.startsWith(SEQ_INSERT_PREFIX)) {
-          statementList.add(s);
-        }
-      }
-      return statementList;
-    } catch (IOException e) {
-      return Collections.emptyList();
-    }
-  }
-
   //TODO all tests this method is used in should be modified to remove hardcoded IDs
   public static void resetSequences(Injector injector) {
     DBAccessorImpl dbAccessor = injector.getInstance(DBAccessorImpl.class);
     try {
       if (dbAccessor.tableExists("ambari_sequences")) {
-        if (seqInsertStatements == null) {
-          seqInsertStatements = collectSequenceInserts();
-        }
-        if (!CollectionUtils.isEmpty(seqInsertStatements)) {
-          dbAccessor.truncateTable("ambari_sequences");
-
-          for (String insert : seqInsertStatements) {
-            dbAccessor.executeUpdate(insert);
+        dbAccessor.truncateTable("ambari_sequences");
+        PreparedStatement preparedStatement = dbAccessor.getConnection().prepareStatement(SEQ_STATEMENT);
+        try {
+          for (String sequenceName : sequenceList) {
+            preparedStatement.setString(1, sequenceName);
+            preparedStatement.executeUpdate();
           }
+        } finally {
+          preparedStatement.close();
         }
 
       }

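Note: the cleaner now re-seeds a fixed list of sequences with one parameterized INSERT instead of re-reading the DDL file and replaying its INSERT lines on every reset. A standalone sketch of the same pattern against an in-memory H2 database (the JDBC URL and the three sequence names are placeholders for the 51 entries above, and it assumes the H2 driver is on the classpath); it uses try-with-resources where the diff uses an explicit try/finally:

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.Statement;
import java.util.Arrays;
import java.util.List;

public class SequenceResetSketch {
  private static final List<String> SEQUENCES =
      Arrays.asList("cluster_id_seq", "host_id_seq", "user_id_seq");

  public static void main(String[] args) throws Exception {
    try (Connection conn = DriverManager.getConnection("jdbc:h2:mem:demo")) {
      try (Statement ddl = conn.createStatement()) {
        ddl.execute("CREATE TABLE ambari_sequences("
            + "sequence_name VARCHAR(255) PRIMARY KEY, sequence_value BIGINT)");
        ddl.execute("TRUNCATE TABLE ambari_sequences");
      }
      // One prepared statement, re-bound once per sequence name.
      try (PreparedStatement ps = conn.prepareStatement(
          "INSERT INTO ambari_sequences(sequence_name, sequence_value) VALUES (?, 0)")) {
        for (String name : SEQUENCES) {
          ps.setString(1, name);
          ps.executeUpdate();
        }
      }
    }
  }
}
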
http://git-wip-us.apache.org/repos/asf/ambari/blob/2d7276e2/ambari-server/src/test/java/org/apache/ambari/server/orm/dao/CrudDAOTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/orm/dao/CrudDAOTest.java b/ambari-server/src/test/java/org/apache/ambari/server/orm/dao/CrudDAOTest.java
index af54653..f3db856 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/orm/dao/CrudDAOTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/orm/dao/CrudDAOTest.java
@@ -54,11 +54,12 @@ public class CrudDAOTest {
   @Before
   public void before() {
     injector = Guice.createInjector(new InMemoryDefaultTestModule());
-    stackDAO = injector.getInstance(StackDAO.class);
-    repositoryVersionDAO = injector.getInstance(RepositoryVersionDAO.class);
     H2DatabaseCleaner.resetSequences(injector);
     injector.getInstance(GuiceJpaInitializer.class);
 
+    stackDAO = injector.getInstance(StackDAO.class);
+    repositoryVersionDAO = injector.getInstance(RepositoryVersionDAO.class);
+
     // required to populate stacks into the database
     injector.getInstance(AmbariMetaInfo.class);
   }

http://git-wip-us.apache.org/repos/asf/ambari/blob/2d7276e2/ambari-server/src/test/java/org/apache/ambari/server/orm/dao/UpgradeDAOTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/orm/dao/UpgradeDAOTest.java b/ambari-server/src/test/java/org/apache/ambari/server/orm/dao/UpgradeDAOTest.java
index 3c5970d..dc30c6c 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/orm/dao/UpgradeDAOTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/orm/dao/UpgradeDAOTest.java
@@ -80,7 +80,7 @@ public class UpgradeDAOTest {
 
     // create upgrade entities
     UpgradeEntity entity = new UpgradeEntity();
-    entity.setClusterId(Long.valueOf(1));
+    entity.setClusterId(clusterId.longValue());
     entity.setRequestId(Long.valueOf(1));
     entity.setFromVersion("");
     entity.setToVersion("");
@@ -150,14 +150,14 @@ public class UpgradeDAOTest {
     // create upgrade entities
     RequestEntity requestEntity = new RequestEntity();
     requestEntity.setRequestId(1L);
-    requestEntity.setClusterId(1L);
+    requestEntity.setClusterId(clusterId.longValue());
     requestEntity.setStatus(HostRoleStatus.PENDING);
     requestEntity.setStages(new ArrayList<StageEntity>());
     requestDAO.create(requestEntity);
 
     UpgradeEntity entity1 = new UpgradeEntity();
     entity1.setId(11L);
-    entity1.setClusterId(1L);
+    entity1.setClusterId(clusterId.longValue());
     entity1.setDirection(Direction.UPGRADE);
     entity1.setRequestId(1L);
     entity1.setFromVersion("2.2.0.0-1234");
@@ -168,7 +168,7 @@ public class UpgradeDAOTest {
     dao.create(entity1);
     UpgradeEntity entity2 = new UpgradeEntity();
     entity2.setId(22L);
-    entity2.setClusterId(1L);
+    entity2.setClusterId(clusterId.longValue());
     entity2.setDirection(Direction.DOWNGRADE);
     entity2.setRequestId(1L);
     entity2.setFromVersion("2.3.0.0-4567");
@@ -179,7 +179,7 @@ public class UpgradeDAOTest {
     dao.create(entity2);
     UpgradeEntity entity3 = new UpgradeEntity();
     entity3.setId(33L);
-    entity3.setClusterId(1L);
+    entity3.setClusterId(clusterId.longValue());
     entity3.setDirection(Direction.UPGRADE);
     entity3.setRequestId(1L);
     entity3.setFromVersion("2.2.0.0-1234");
@@ -188,7 +188,7 @@ public class UpgradeDAOTest {
     entity3.setUpgradePackage("test-upgrade");
     entity3.setDowngradeAllowed(true);
     dao.create(entity3);
-    UpgradeEntity lastUpgradeForCluster = dao.findLastUpgradeForCluster(1, Direction.UPGRADE);
+    UpgradeEntity lastUpgradeForCluster = dao.findLastUpgradeForCluster(clusterId.longValue(), Direction.UPGRADE);
     assertNotNull(lastUpgradeForCluster);
     assertEquals(33L, (long)lastUpgradeForCluster.getId());
   }
@@ -202,14 +202,14 @@ public class UpgradeDAOTest {
   public void testUpdatableColumns() throws Exception {
     RequestEntity requestEntity = new RequestEntity();
     requestEntity.setRequestId(1L);
-    requestEntity.setClusterId(1L);
+    requestEntity.setClusterId(clusterId.longValue());
     requestEntity.setStatus(HostRoleStatus.PENDING);
     requestEntity.setStages(new ArrayList<StageEntity>());
     requestDAO.create(requestEntity);
 
     UpgradeEntity upgradeEntity = new UpgradeEntity();
     upgradeEntity.setId(11L);
-    upgradeEntity.setClusterId(1L);
+    upgradeEntity.setClusterId(clusterId.longValue());
     upgradeEntity.setDirection(Direction.UPGRADE);
     upgradeEntity.setRequestId(1L);
     upgradeEntity.setFromVersion("2.2.0.0-1234");

http://git-wip-us.apache.org/repos/asf/ambari/blob/2d7276e2/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/ComponentVersionCheckActionTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/ComponentVersionCheckActionTest.java b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/ComponentVersionCheckActionTest.java
index 389c604..a20f0e1 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/ComponentVersionCheckActionTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/ComponentVersionCheckActionTest.java
@@ -64,10 +64,13 @@ import org.apache.ambari.server.state.ServiceComponentHostFactory;
 import org.apache.ambari.server.state.ServiceFactory;
 import org.apache.ambari.server.state.StackId;
 import org.apache.ambari.server.state.State;
+import org.apache.ambari.server.utils.EventBusSynchronizer;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import com.google.inject.Guice;
 import com.google.inject.Inject;
@@ -78,6 +81,9 @@ import com.google.inject.persist.UnitOfWork;
  * Tests upgrade-related server side actions
  */
 public class ComponentVersionCheckActionTest {
+  private static final Logger LOG = LoggerFactory.getLogger(ComponentVersionCheckActionTest.class);
+
   private static final String HDP_2_1_1_0 = "2.1.1.0-1";
   private static final String HDP_2_1_1_1 = "2.1.1.1-2";
 
@@ -120,6 +126,7 @@ public class ComponentVersionCheckActionTest {
   @Before
   public void setup() throws Exception {
     m_injector = Guice.createInjector(new InMemoryDefaultTestModule());
+    EventBusSynchronizer.synchronizeAmbariEventPublisher(m_injector);
     m_injector.getInstance(GuiceJpaInitializer.class);
     m_injector.injectMembers(this);
     m_injector.getInstance(UnitOfWork.class).begin();
@@ -185,9 +192,8 @@ public class ComponentVersionCheckActionTest {
     hostVersionDAO.create(entity);
   }
 
-  private void makeCrossStackUpgradeCluster(StackId sourceStack, String sourceRepo, StackId targetStack, String targetRepo) throws Exception {
-    String clusterName = "c1";
-    String hostName = "h1";
+  private void makeCrossStackUpgradeCluster(StackId sourceStack, String sourceRepo, StackId targetStack,
+                                            String targetRepo, String clusterName, String hostName) throws Exception {
 
     Clusters clusters = m_injector.getInstance(Clusters.class);
     clusters.addCluster(clusterName, sourceStack);
@@ -219,6 +225,16 @@ public class ComponentVersionCheckActionTest {
     c.createClusterVersion(sourceStack, sourceRepo, "admin", RepositoryVersionState.INSTALLING);
     c.transitionClusterVersion(sourceStack, sourceRepo, RepositoryVersionState.CURRENT);
 
+  }
+
+  private void createNewRepoVersion(StackId targetStack, String targetRepo, String clusterName,
+                                    String hostName) throws AmbariException {
+    Clusters clusters = m_injector.getInstance(Clusters.class);
+    StackDAO stackDAO = m_injector.getInstance(StackDAO.class);
+
+    StackEntity stackEntityTarget = stackDAO.find(targetStack.getStackName(), targetStack.getStackVersion());
+
+    Cluster c = clusters.getCluster(clusterName);
     // Create the new repo version
     String urlInfo = "[{'repositories':["
         + "{'Repositories/base_url':'http://foo1','Repositories/repo_name':'HDP','Repositories/repo_id':'" + targetRepo + "'}"
@@ -284,8 +300,10 @@ public class ComponentVersionCheckActionTest {
     StackId targetStack = HDP_22_STACK;
     String sourceRepo = HDP_2_1_1_0;
     String targetRepo = HDP_2_2_1_0;
+    String clusterName = "c1";
+    String hostName = "h1";
 
-    makeCrossStackUpgradeCluster(sourceStack, sourceRepo, targetStack, targetRepo);
+    makeCrossStackUpgradeCluster(sourceStack, sourceRepo, targetStack, targetRepo, clusterName, hostName);
 
     Clusters clusters = m_injector.getInstance(Clusters.class);
     Cluster cluster = clusters.getCluster("c1");
@@ -296,9 +314,10 @@ public class ComponentVersionCheckActionTest {
     createNewServiceComponentHost(cluster, "HDFS", "NAMENODE", "h1");
     createNewServiceComponentHost(cluster, "HDFS", "DATANODE", "h1");
 
+    createNewRepoVersion(targetStack, targetRepo, clusterName, hostName);
+
     // create some configs
     createConfigs(cluster);
-
     // setup the cluster for the upgrade across stacks
     cluster.setCurrentStackVersion(sourceStack);
     cluster.setDesiredStackVersion(targetStack);

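Note: the EventBusSynchronizer.synchronizeAmbariEventPublisher(...) call added to setup() forces event publishing onto the caller's thread, so test assertions cannot race asynchronous dispatch. An illustrative sketch of the underlying idea using a plain Guava EventBus (this is not Ambari's actual synchronizer code, just the pattern it relies on):

import com.google.common.eventbus.EventBus;
import com.google.common.eventbus.Subscribe;

public class SynchronousBusSketch {
  static final class Listener {
    String lastEvent;

    @Subscribe
    public void onEvent(String event) {
      lastEvent = event;
    }
  }

  public static void main(String[] args) {
    EventBus bus = new EventBus("test");
    Listener listener = new Listener();
    bus.register(listener);

    // post() dispatches on this thread; the handler has run before it returns.
    bus.post("version-changed");
    System.out.println(listener.lastEvent); // version-changed
  }
}
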
http://git-wip-us.apache.org/repos/asf/ambari/blob/2d7276e2/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/UpgradeActionTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/UpgradeActionTest.java b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/UpgradeActionTest.java
index 42a4ac5..bb76a71 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/UpgradeActionTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/UpgradeActionTest.java
@@ -85,6 +85,7 @@ import org.apache.ambari.server.state.State;
 import org.apache.ambari.server.state.stack.UpgradePack;
 import org.apache.ambari.server.state.stack.upgrade.Direction;
 import org.apache.ambari.server.state.stack.upgrade.UpgradeType;
+import org.apache.ambari.server.utils.EventBusSynchronizer;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;
@@ -161,6 +162,7 @@ public class UpgradeActionTest {
   @Before
   public void setup() throws Exception {
     m_injector = Guice.createInjector(new InMemoryDefaultTestModule());
+    EventBusSynchronizer.synchronizeAmbariEventPublisher(m_injector);
     m_injector.getInstance(GuiceJpaInitializer.class);
     m_injector.injectMembers(this);
     m_injector.getInstance(UnitOfWork.class).begin();
@@ -275,15 +277,13 @@ public class UpgradeActionTest {
     hostVersionDAO.create(entityTarget);
   }
 
-  private void makeUpgradeCluster(StackId sourceStack, String sourceRepo, StackId targetStack, String targetRepo) throws Exception {
-    String hostName = "h1";
+  private void createUpgradeClusterAndSourceRepo(StackId sourceStack, String sourceRepo,
+                                                 String hostName) throws Exception {
 
     clusters.addCluster(clusterName, sourceStack);
 
     StackEntity stackEntitySource = stackDAO.find(sourceStack.getStackName(), sourceStack.getStackVersion());
-    StackEntity stackEntityTarget = stackDAO.find(targetStack.getStackName(), targetStack.getStackVersion());
     assertNotNull(stackEntitySource);
-    assertNotNull(stackEntityTarget);
 
     Cluster c = clusters.getCluster(clusterName);
     c.setDesiredStackVersion(sourceStack);
@@ -320,6 +320,14 @@ public class UpgradeActionTest {
     c.createClusterVersion(sourceStack, sourceRepo, "admin", RepositoryVersionState.INSTALLING);
     c.transitionClusterVersion(sourceStack, sourceRepo, RepositoryVersionState.CURRENT);
 
+  }
+
+  private void createUpgradeClusterTargetRepo(StackId targetStack, String targetRepo,
+                                              String hostName) throws AmbariException {
+    Cluster c = clusters.getCluster(clusterName);
+    StackEntity stackEntityTarget = stackDAO.find(targetStack.getStackName(), targetStack.getStackVersion());
+    assertNotNull(stackEntityTarget);
+
     // Create the new repo version
     String urlInfo = "[{'repositories':["
             + "{'Repositories/base_url':'http://foo1','Repositories/repo_name':'HDP','Repositories/repo_id':'" + targetStack.getStackId() + "'}"
@@ -354,16 +362,14 @@ public class UpgradeActionTest {
     assertEquals(RepositoryVersionState.INSTALLED, hostVersions.get(0).getState());
   }
 
-  private void makeCrossStackUpgradeCluster(StackId sourceStack, String sourceRepo, StackId targetStack, String targetRepo) throws Exception {
-    String hostName = "h1";
+  private void makeCrossStackUpgradeClusterAndSourceRepo(StackId sourceStack, String sourceRepo,
+                                                         String hostName) throws Exception {
 
     clusters.addCluster(clusterName, sourceStack);
 
     StackEntity stackEntitySource = stackDAO.find(sourceStack.getStackName(), sourceStack.getStackVersion());
-    StackEntity stackEntityTarget = stackDAO.find(targetStack.getStackName(), targetStack.getStackVersion());
 
     assertNotNull(stackEntitySource);
-    assertNotNull(stackEntityTarget);
 
     Cluster c = clusters.getCluster(clusterName);
     c.setCurrentStackVersion(sourceStack);
@@ -384,11 +390,18 @@ public class UpgradeActionTest {
     m_helper.getOrCreateRepositoryVersion(sourceStack, sourceRepo);
     c.createClusterVersion(sourceStack, sourceRepo, "admin", RepositoryVersionState.INSTALLING);
     c.transitionClusterVersion(sourceStack, sourceRepo, RepositoryVersionState.CURRENT);
+  }
+
+  private void makeCrossStackUpgradeTargetRepo(StackId targetStack, String targetRepo, String hostName) throws Exception {
+    StackEntity stackEntityTarget = stackDAO.find(targetStack.getStackName(), targetStack.getStackVersion());
+    assertNotNull(stackEntityTarget);
+    Cluster c = clusters.getCluster(clusterName);
+
 
     // Create the new repo version
     String urlInfo = "[{'repositories':["
-            + "{'Repositories/base_url':'http://foo1','Repositories/repo_name':'HDP','Repositories/repo_id':'" + targetRepo + "'}"
-            + "], 'OperatingSystems/os_type':'redhat6'}]";
+        + "{'Repositories/base_url':'http://foo1','Repositories/repo_name':'HDP','Repositories/repo_id':'" + targetRepo + "'}"
+        + "], 'OperatingSystems/os_type':'redhat6'}]";
     repoVersionDAO.create(stackEntityTarget, targetRepo, String.valueOf(System.currentTimeMillis()), urlInfo);
 
     // Start upgrading the newer repo
@@ -396,7 +409,7 @@ public class UpgradeActionTest {
     c.transitionClusterVersion(targetStack, targetRepo, RepositoryVersionState.INSTALLED);
 
     c.mapHostVersions(Collections.singleton(hostName), c.getCurrentClusterVersion(),
-            RepositoryVersionState.CURRENT);
+        RepositoryVersionState.CURRENT);
 
     HostDAO hostDAO = m_injector.getInstance(HostDAO.class);
 
@@ -418,6 +431,7 @@ public class UpgradeActionTest {
     StackId targetStack = HDP_22_STACK;
     String sourceRepo = HDP_2_1_1_0;
     String targetRepo = HDP_2_2_0_1;
+    String hostName = "h1";
 
     // Must be a NON_ROLLING upgrade that jumps stacks in order for it to apply config changes.
     // That upgrade pack has changes for ZK and NameNode.
@@ -426,10 +440,8 @@ public class UpgradeActionTest {
     Map<String, UpgradePack> packs = ambariMetaInfo.getUpgradePacks(sourceStack.getStackName(), sourceStack.getStackVersion());
     Assert.assertTrue(packs.containsKey(upgradePackName));
 
-    makeCrossStackUpgradeCluster(sourceStack, sourceRepo, targetStack, targetRepo);
-
-    RepositoryVersionEntity targetRve = repoVersionDAO.findByStackNameAndVersion("HDP", targetRepo);
-    Assert.assertNotNull(targetRve);
+    makeCrossStackUpgradeClusterAndSourceRepo(sourceStack, sourceRepo, hostName);
+//    makeCrossStackUpgradeCluster(sourceStack, sourceRepo, targetStack, targetRepo);
 
     Cluster cluster = clusters.getCluster(clusterName);
 
@@ -446,6 +458,11 @@ public class UpgradeActionTest {
     createNewServiceComponentHost(cluster, "HDFS", "NAMENODE", "h1");
     createNewServiceComponentHost(cluster, "HDFS", "DATANODE", "h1");
 
+    makeCrossStackUpgradeTargetRepo(targetStack, targetRepo, hostName);
+
+    RepositoryVersionEntity targetRve = repoVersionDAO.findByStackNameAndVersion("HDP", targetRepo);
+    Assert.assertNotNull(targetRve);
+
     // Create some configs
     createConfigs(cluster);
     Collection<Config> configs = cluster.getAllConfigs();
@@ -583,8 +600,10 @@ public class UpgradeActionTest {
     StackId targetStack = HDP_21_STACK;
     String sourceRepo = HDP_2_1_1_0;
     String targetRepo = HDP_2_1_1_1;
+    String hostName = "h1";
 
-    makeUpgradeCluster(sourceStack, sourceRepo, targetStack, targetRepo);
+    createUpgradeClusterAndSourceRepo(sourceStack, sourceRepo, hostName);
+    createUpgradeClusterTargetRepo(targetStack, targetRepo, hostName);
 
     // Verify the repo before calling Finalize
     AmbariCustomCommandExecutionHelper helper = m_injector.getInstance(AmbariCustomCommandExecutionHelper.class);
@@ -630,8 +649,10 @@ public class UpgradeActionTest {
     StackId targetStack = HDP_21_STACK;
     String sourceRepo = HDP_2_1_1_0;
     String targetRepo = HDP_2_1_1_1;
+    String hostName = "h1";
 
-    makeUpgradeCluster(sourceStack, sourceRepo, targetStack, targetRepo);
+    createUpgradeClusterAndSourceRepo(sourceStack, sourceRepo, hostName);
+    createUpgradeClusterTargetRepo(targetStack, targetRepo, hostName);
 
     // move the old version from CURRENT to INSTALLED and the new version from
     // UPGRADED to CURRENT - this will simulate what happens when a host is
@@ -696,8 +717,10 @@ public class UpgradeActionTest {
     StackId targetStack = HDP_22_STACK;
     String sourceRepo = HDP_2_1_1_0;
     String targetRepo = HDP_2_2_0_1;
+    String hostName = "h1";
 
-    makeCrossStackUpgradeCluster(sourceStack, sourceRepo, targetStack, targetRepo);
+    makeCrossStackUpgradeClusterAndSourceRepo(sourceStack, sourceRepo, hostName);
+    makeCrossStackUpgradeTargetRepo(targetStack, targetRepo, hostName);
 
     Cluster cluster = clusters.getCluster(clusterName);
 
@@ -747,10 +770,12 @@ public class UpgradeActionTest {
     StackId targetStack = HDP_22_STACK;
     String sourceRepo = HDP_2_1_1_0;
     String targetRepo = HDP_2_2_0_1;
+    String hostName = "h1";
 
-    makeCrossStackUpgradeCluster(sourceStack, sourceRepo, targetStack, targetRepo);
+    makeCrossStackUpgradeClusterAndSourceRepo(sourceStack, sourceRepo, hostName);
     Cluster cluster = clusters.getCluster(clusterName);
 
+
     // install HDFS with some components
     Service service = installService(cluster, "HDFS");
     addServiceComponent(cluster, service, "NAMENODE");
@@ -758,6 +783,8 @@ public class UpgradeActionTest {
     createNewServiceComponentHost(cluster, "HDFS", "NAMENODE", "h1");
     createNewServiceComponentHost(cluster, "HDFS", "DATANODE", "h1");
 
+    makeCrossStackUpgradeTargetRepo(targetStack, targetRepo, hostName);
+
     // create some configs
     createConfigs(cluster);
 
@@ -836,8 +863,9 @@ public class UpgradeActionTest {
     StackId targetStack = HDP_22_STACK;
     String sourceRepo = HDP_2_1_1_0;
     String targetRepo = HDP_2_2_0_1;
+    String hostName = "h1";
 
-    makeCrossStackUpgradeCluster(sourceStack, sourceRepo, targetStack, targetRepo);
+    makeCrossStackUpgradeClusterAndSourceRepo(sourceStack, sourceRepo, hostName);
 
     Cluster cluster = clusters.getCluster(clusterName);
 
@@ -847,6 +875,7 @@ public class UpgradeActionTest {
     createNewServiceComponentHost(cluster, "HDFS", "NAMENODE", "h1");
     createNewServiceComponentHost(cluster, "HDFS", "DATANODE", "h1");
 
+    makeCrossStackUpgradeTargetRepo(targetStack, targetRepo, hostName);
     // create some configs
     createConfigs(cluster);
 
@@ -915,8 +944,9 @@ public class UpgradeActionTest {
     StackId targetStack = HDP_21_STACK;
     String sourceRepo = HDP_2_1_1_0;
     String targetRepo = HDP_2_1_1_1;
+    String hostName = "h1";
 
-    makeUpgradeCluster(sourceStack, sourceRepo, targetStack, targetRepo);
+    createUpgradeClusterAndSourceRepo(sourceStack, sourceRepo, hostName);
 
     // Verify the repo before calling Finalize
     AmbariCustomCommandExecutionHelper helper = m_injector.getInstance(AmbariCustomCommandExecutionHelper.class);
@@ -930,6 +960,8 @@ public class UpgradeActionTest {
     ServiceComponentHost nnSCH = createNewServiceComponentHost(cluster, "HDFS", "NAMENODE", "h1");
     ServiceComponentHost dnSCH = createNewServiceComponentHost(cluster, "HDFS", "DATANODE", "h1");
 
+    createUpgradeClusterTargetRepo(targetStack, targetRepo, hostName);
+
     // fake their upgrade
     nnSCH.setStackVersion(nnSCH.getDesiredStackVersion());
     nnSCH.setVersion(targetRepo);

http://git-wip-us.apache.org/repos/asf/ambari/blob/2d7276e2/ambari-server/src/test/java/org/apache/ambari/server/state/ConfigHelperTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/ConfigHelperTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/ConfigHelperTest.java
index 5d849c9..b1c10f5 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/ConfigHelperTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/ConfigHelperTest.java
@@ -58,9 +58,7 @@ import org.apache.ambari.server.state.configgroup.ConfigGroupFactory;
 import org.apache.ambari.server.state.host.HostFactory;
 import org.apache.ambari.server.state.stack.OsFamily;
 import org.junit.After;
-import org.junit.AfterClass;
 import org.junit.Before;
-import org.junit.BeforeClass;
 import org.junit.Test;
 import org.junit.experimental.runners.Enclosed;
 import org.junit.runner.RunWith;
@@ -90,8 +88,8 @@ public class ConfigHelperTest {
     private static AmbariMetaInfo metaInfo;
     private static ConfigFactory configFactory;
 
-    @BeforeClass
-    public static void setup() throws Exception {
+    @Before
+    public void setup() throws Exception {
       // Set the authenticated user
       // TODO: remove this or replace the authenticated user to test authorization rules
       SecurityContextHolder.getContext().setAuthentication(TestAuthenticationFactory.createAdministrator("admin"));
@@ -222,8 +220,8 @@ public class ConfigHelperTest {
       }}, null);
     }
 
-    @AfterClass
-    public static void tearDown() throws AmbariException, SQLException {
+    @After
+    public void tearDown() throws AmbariException, SQLException {
       H2DatabaseCleaner.clearDatabaseAndStopPersistenceService(injector);
 
       // Clear the authenticated user
@@ -536,7 +534,7 @@ public class ConfigHelperTest {
               configHelper.getEffectiveDesiredTags(cluster, "h3"));
 
       Assert.assertNotNull(effectiveAttributes);
-      Assert.assertEquals(10, effectiveAttributes.size());
+      Assert.assertEquals(7, effectiveAttributes.size());
 
       Assert.assertTrue(effectiveAttributes.containsKey("global3"));
       Map<String, Map<String, String>> globalAttrs = effectiveAttributes.get("global3");
@@ -899,7 +897,7 @@ public class ConfigHelperTest {
       // set up expectations
       expect(sch.getActualConfigs()).andReturn(schReturn).times(6);
       expect(sch.getHostName()).andReturn("h1").anyTimes();
-      expect(sch.getClusterId()).andReturn(1l).anyTimes();
+      expect(sch.getClusterId()).andReturn(cluster.getClusterId()).anyTimes();
       expect(sch.getServiceName()).andReturn("FLUME").anyTimes();
       expect(sch.getServiceComponentName()).andReturn("FLUME_HANDLER").anyTimes();
       replay(sch);

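Note: switching ConfigHelperTest from @BeforeClass/@AfterClass to @Before/@After means the database is built and cleaned around every test method rather than once per class, which is what lets H2DatabaseCleaner reset state between tests. A minimal JUnit 4 sketch of that lifecycle difference (the setup and teardown bodies are placeholders, not the test's real initialization):

import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;

public class PerTestLifecycleSketch {
  private StringBuilder state; // stands in for the per-test database

  @Before
  public void setup() {
    state = new StringBuilder("fresh"); // runs before every @Test method
  }

  @After
  public void tearDown() {
    state = null; // runs after every @Test method, like clearDatabaseAndStopPersistenceService
  }

  @Test
  public void firstTestSeesFreshState() {
    Assert.assertEquals("fresh", state.toString());
  }

  @Test
  public void secondTestAlsoSeesFreshState() {
    Assert.assertEquals("fresh", state.toString());
  }
}
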
http://git-wip-us.apache.org/repos/asf/ambari/blob/2d7276e2/ambari-server/src/test/java/org/apache/ambari/server/state/cluster/AlertDataManagerTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/cluster/AlertDataManagerTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/cluster/AlertDataManagerTest.java
index 05aeddb..4ad93e6 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/cluster/AlertDataManagerTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/cluster/AlertDataManagerTest.java
@@ -112,6 +112,7 @@ public class AlertDataManagerTest {
   @Before
   public void setup() throws Exception {
     m_injector = Guice.createInjector(new InMemoryDefaultTestModule());
+    EventBusSynchronizer.synchronizeAlertEventPublisher(m_injector);
     m_injector.getInstance(GuiceJpaInitializer.class);
     m_injector.getInstance(UnitOfWork.class).begin();
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/2d7276e2/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 3f5a884..2b0cb30 100644
--- a/pom.xml
+++ b/pom.xml
@@ -158,6 +158,10 @@
           <artifactId>maven-clean-plugin</artifactId>
           <version>2.5</version>
         </plugin>
+        <plugin>
+          <artifactId>maven-assembly-plugin</artifactId>
+          <version>2.2-beta-5</version>
+        </plugin>
       </plugins>
     </pluginManagement>
     <plugins>