Posted to commits@ambari.apache.org by jo...@apache.org on 2017/07/19 14:20:13 UTC

[11/13] ambari git commit: AMBARI-21450 - Fixing Unit Test Compilation Issues From trunk Merge (jonathanhurley)

http://git-wip-us.apache.org/repos/asf/ambari/blob/56362fd6/ambari-server/src/test/java/org/apache/ambari/server/api/services/AmbariMetaInfoTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/api/services/AmbariMetaInfoTest.java b/ambari-server/src/test/java/org/apache/ambari/server/api/services/AmbariMetaInfoTest.java
index bc1c19a..608c3ae 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/api/services/AmbariMetaInfoTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/api/services/AmbariMetaInfoTest.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -21,7 +21,6 @@ package org.apache.ambari.server.api.services;
 import static org.easymock.EasyMock.createNiceMock;
 import static org.easymock.EasyMock.expect;
 import static org.easymock.EasyMock.replay;
-import static org.easymock.EasyMock.reset;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertNotNull;
@@ -59,7 +58,7 @@ import org.apache.ambari.server.orm.OrmTestHelper;
 import org.apache.ambari.server.orm.dao.AlertDefinitionDAO;
 import org.apache.ambari.server.orm.dao.MetainfoDAO;
 import org.apache.ambari.server.orm.entities.AlertDefinitionEntity;
-import org.apache.ambari.server.orm.entities.MetainfoEntity;
+import org.apache.ambari.server.orm.entities.RepositoryVersionEntity;
 import org.apache.ambari.server.stack.StackManager;
 import org.apache.ambari.server.stack.StackManagerFactory;
 import org.apache.ambari.server.state.AutoDeployInfo;
@@ -91,8 +90,6 @@ import org.apache.ambari.server.state.stack.OsFamily;
 import org.apache.ambari.server.utils.EventBusSynchronizer;
 import org.apache.commons.io.FileUtils;
 import org.apache.commons.lang.StringUtils;
-import org.easymock.Capture;
-import org.easymock.EasyMock;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Rule;
@@ -245,8 +242,7 @@ public class AmbariMetaInfoTest {
     assertNotNull(redhat6Repo);
     for (RepositoryInfo ri : redhat6Repo) {
       if (STACK_NAME_HDP.equals(ri.getRepoName())) {
-        assertFalse(ri.getBaseUrl().equals(ri.getDefaultBaseUrl()));
-        assertEquals(ri.getBaseUrl(), ri.getLatestBaseUrl());
+        assertTrue(ri.getBaseUrl().equals(ri.getDefaultBaseUrl()));
       }
     }
   }
@@ -284,165 +280,6 @@ public class AmbariMetaInfoTest {
   }
 
   @Test
-  public void testGetRepositoryUpdatedBaseUrl() throws Exception {
-    // Scenario: user has internet, but calls to set repos via api
-    // use whatever they set
-    String buildDir = tmpFolder.getRoot().getAbsolutePath();
-    TestAmbariMetaInfo ambariMetaInfo = setupTempAmbariMetaInfo(buildDir);
-    // The current stack already has (HDP, 2.1.1, redhat6)
-
-    // Updating the baseUrl
-    String newBaseUrl = "http://myprivate-repo-1.hortonworks.com/HDP/centos6/2.x/updates/2.0.6.0";
-    ambariMetaInfo.updateRepoBaseURL(STACK_NAME_HDP, "2.1.1", "redhat6",
-            HDP_REPO_ID, newBaseUrl);
-    RepositoryInfo repoInfo = ambariMetaInfo.getRepository(STACK_NAME_HDP, "2.1.1", "redhat6",
-            HDP_REPO_ID);
-    assertEquals(newBaseUrl, repoInfo.getBaseUrl());
-    String prevBaseUrl = repoInfo.getDefaultBaseUrl();
-
-    // mock expectations
-    MetainfoDAO metainfoDAO = ambariMetaInfo.metaInfoDAO;
-    reset(metainfoDAO);
-    MetainfoEntity entity = createNiceMock(MetainfoEntity.class);
-    expect(metainfoDAO.findByKey("repo:/HDP/2.1.1/redhat6/HDP-2.1.1:baseurl")).andReturn(entity).atLeastOnce();
-    expect(entity.getMetainfoValue()).andReturn(newBaseUrl).atLeastOnce();
-    replay(metainfoDAO, entity);
-
-    ambariMetaInfo.init();
-
-    waitForAllReposToBeResolved(ambariMetaInfo);
-
-    List<RepositoryInfo> redhat6Repo = ambariMetaInfo.getRepositories(
-        STACK_NAME_HDP, "2.1.1", "redhat6");
-
-    assertNotNull(redhat6Repo);
-
-    for (RepositoryInfo ri : redhat6Repo) {
-      if (HDP_REPO_NAME.equals(ri.getRepoName())) {
-        assertEquals(newBaseUrl, ri.getBaseUrl());
-        // defaultBaseUrl and baseUrl should not be the same, since baseUrl was updated.
-        assertFalse(ri.getBaseUrl().equals(ri.getDefaultBaseUrl()));
-      }
-    }
-
-    Capture<MetainfoEntity> c = new Capture<MetainfoEntity>();
-
-    metainfoDAO = ambariMetaInfo.metaInfoDAO;
-    reset(metainfoDAO);
-    reset(entity);
-    expect(metainfoDAO.findByKey("repo:/HDP/2.1.1/redhat6/HDP-2.1.1:baseurl")).andReturn(entity).atLeastOnce();
-    expect(metainfoDAO.merge(EasyMock.capture(c))).andReturn(entity).atLeastOnce();
-    replay(metainfoDAO, entity);
-
-    // Reset the database with the original baseUrl
-    ambariMetaInfo.updateRepoBaseURL(STACK_NAME_HDP, "2.1.1", "redhat6",
-            HDP_REPO_ID, prevBaseUrl);
-
-    assertEquals(prevBaseUrl, c.getValue().getMetainfoValue());
-    assertTrue(repoInfo.isBaseUrlFromSaved());
-
-  }
-
-  @Test
-  public void testGetRepositoryUpdatedUtilsBaseUrl() throws Exception {
-    // Scenario: user has internet, but calls to set repos via api
-    // use whatever they set
-    String stackVersion = "0.2";
-    String buildDir = tmpFolder.getRoot().getAbsolutePath();
-    TestAmbariMetaInfo ambariMetaInfo = setupTempAmbariMetaInfo(buildDir);
-
-    // Updating the baseUrl
-    String newBaseUrl = "http://myprivate-repo-1.hortonworks.com/HDP-Utils/centos6/2.x/updates/2.0.6.0";
-    ambariMetaInfo.updateRepoBaseURL(STACK_NAME_HDP, stackVersion, "redhat6",
-            REPO_ID, newBaseUrl);
-    RepositoryInfo repoInfo = ambariMetaInfo.getRepository(STACK_NAME_HDP, stackVersion, "redhat6",
-            REPO_ID);
-    assertEquals(newBaseUrl, repoInfo.getBaseUrl());
-    String prevBaseUrl = repoInfo.getDefaultBaseUrl();
-
-    // mock expectations
-    MetainfoDAO metainfoDAO = ambariMetaInfo.metaInfoDAO;
-    reset(metainfoDAO);
-    MetainfoEntity entity = createNiceMock(MetainfoEntity.class);
-    expect(metainfoDAO.findByKey("repo:/HDP/0.2/redhat6/HDP-UTILS-1.1.0.15:baseurl")).andReturn(entity).atLeastOnce();
-    expect(entity.getMetainfoValue()).andReturn(newBaseUrl).atLeastOnce();
-    replay(metainfoDAO, entity);
-
-    ambariMetaInfo.init();
-
-    List<RepositoryInfo> redhat6Repo = ambariMetaInfo.getRepositories(
-            STACK_NAME_HDP, stackVersion, "redhat6");
-    assertNotNull(redhat6Repo);
-    for (RepositoryInfo ri : redhat6Repo) {
-      if (HDP_UTILS_REPO_NAME.equals(ri.getRepoName())) {
-        assertEquals(newBaseUrl, ri.getBaseUrl());
-        // defaultBaseUrl and baseUrl should not be the same, since baseUrl was updated.
-        assertFalse(ri.getBaseUrl().equals(ri.getDefaultBaseUrl()));
-      }
-    }
-
-    // Reset the database with the original baseUrl
-    ambariMetaInfo.updateRepoBaseURL(STACK_NAME_HDP, stackVersion, "redhat6",
-            REPO_ID, prevBaseUrl);
-  }
-
-  @Test
-  public void testGetRepositoryNoInternetUpdatedBaseUrl() throws Exception {
-    // Scenario: user has no internet, but calls to set repos via api
-    // use whatever they set
-    String newBaseUrl = "http://myprivate-repo-1.hortonworks.com/HDP/centos6/2.x/updates/2.0.6.0";
-    String buildDir = tmpFolder.getRoot().getAbsolutePath();
-    TestAmbariMetaInfo ambariMetaInfo = setupTempAmbariMetaInfo(buildDir);
-    // The current stack already has (HDP, 2.1.1, redhat6).
-
-    // Deleting the json file referenced by the latestBaseUrl to simulate No
-    // Internet.
-    File latestUrlFile = new File(buildDir, "ambari-metaInfo/HDP/2.1.1/repos/hdp.json");
-    if (System.getProperty("os.name").contains("Windows")) {
-      latestUrlFile.deleteOnExit();
-    }
-    else {
-      FileUtils.deleteQuietly(latestUrlFile);
-      assertTrue(!latestUrlFile.exists());
-    }
-
-    // Update baseUrl
-    ambariMetaInfo.updateRepoBaseURL("HDP", "2.1.1", "redhat6", "HDP-2.1.1",
-        newBaseUrl);
-    RepositoryInfo repoInfo = ambariMetaInfo.getRepository(STACK_NAME_HDP, "2.1.1", "redhat6",
-        STACK_NAME_HDP + "-2.1.1");
-    assertEquals(newBaseUrl, repoInfo.getBaseUrl());
-    String prevBaseUrl = repoInfo.getDefaultBaseUrl();
-
-    // mock expectations
-    MetainfoDAO metainfoDAO = ambariMetaInfo.metaInfoDAO;
-    reset(metainfoDAO);
-    MetainfoEntity entity = createNiceMock(MetainfoEntity.class);
-    expect(metainfoDAO.findByKey("repo:/HDP/2.1.1/redhat6/HDP-2.1.1:baseurl")).andReturn(entity).atLeastOnce();
-    expect(entity.getMetainfoValue()).andReturn(newBaseUrl).atLeastOnce();
-    replay(metainfoDAO, entity);
-
-    ambariMetaInfo.init();
-
-    waitForAllReposToBeResolved(ambariMetaInfo);
-
-    List<RepositoryInfo> redhat6Repo = ambariMetaInfo.getRepositories(
-        STACK_NAME_HDP, "2.1.1", "redhat6");
-    assertNotNull(redhat6Repo);
-    for (RepositoryInfo ri : redhat6Repo) {
-      if (STACK_NAME_HDP.equals(ri.getRepoName())) {
-        // baseUrl should point to the updated baseUrl
-        assertEquals(newBaseUrl, ri.getBaseUrl());
-        assertFalse(ri.getDefaultBaseUrl().equals(ri.getBaseUrl()));
-      }
-    }
-
-    // Reset the database with the original baseUrl
-    ambariMetaInfo.updateRepoBaseURL(STACK_NAME_HDP, "2.1.1", "redhat6",
-        STACK_NAME_HDP + "-2.1.1", prevBaseUrl);
-  }
-
-  @Test
   public void isSupportedStack() throws AmbariException {
     boolean supportedStack = metaInfo.isSupportedStack(STACK_NAME_HDP,
         STACK_VERSION_HDP);
@@ -512,14 +349,14 @@ public class AmbariMetaInfoTest {
   public void testGetRepos() throws Exception {
     Map<String, List<RepositoryInfo>> repos = metaInfo.getRepository(
         STACK_NAME_HDP, STACK_VERSION_HDP);
-    Set<String> centos5Cnt = new HashSet<String>();
-    Set<String> centos6Cnt = new HashSet<String>();
-    Set<String> redhat6cnt = new HashSet<String>();
-    Set<String> redhat5cnt = new HashSet<String>();
+    Set<String> centos5Cnt = new HashSet<>();
+    Set<String> centos6Cnt = new HashSet<>();
+    Set<String> redhat6cnt = new HashSet<>();
+    Set<String> redhat5cnt = new HashSet<>();
 
     for (List<RepositoryInfo> vals : repos.values()) {
       for (RepositoryInfo repo : vals) {
-        LOG.debug("Dumping repo info : " + repo.toString());
+        LOG.debug("Dumping repo info : {}", repo);
         if (repo.getOsType().equals("centos5")) {
           centos5Cnt.add(repo.getRepoId());
         } else if (repo.getOsType().equals("centos6")) {
@@ -549,17 +386,16 @@ public class AmbariMetaInfoTest {
   }
 
 
-  @Test
   /**
  * Make sure global mapping is available when global.xml is
    * in the path.
-   * @throws Exception
    */
+  @Test
   public void testGlobalMapping() throws Exception {
     ServiceInfo sinfo = metaInfo.getService("HDP",
         "0.2", "HDFS");
     List<PropertyInfo> pinfo = sinfo.getProperties();
-    /** check all the config knobs and make sure the global one is there **/
+    // check all the config knobs and make sure the global one is there
     boolean checkforglobal = false;
 
     for (PropertyInfo pinfol: pinfo) {
@@ -987,7 +823,7 @@ public class AmbariMetaInfoTest {
                 }
               }
             }
-            LinkedList<String> failedMetrics = new LinkedList<String>();
+            LinkedList<String> failedMetrics = new LinkedList<>();
             for (MetricDefinition metricDefinition : list) {
               if ("ganglia".equals(metricDefinition.getType())) {
                 //all ams metrics should be temporal
@@ -1003,7 +839,7 @@ public class AmbariMetaInfoTest {
 
               }
             }
-            Assert.assertEquals(failedMetrics.toString() +
+            Assert.assertEquals(failedMetrics +
                 " metrics defined with pointInTime=true for both jmx and ganglia types.",
               0, failedMetrics.size());
           }
@@ -1668,26 +1504,10 @@ public class AmbariMetaInfoTest {
 
     for (RepositoryInfo ri : metaInfo.getRepositories("HDP", "2.1.1", "centos6")) {
       Assert.assertEquals(
-          "Expected the base url to be set properly",
-          "http://s3.amazonaws.com/dev.hortonworks.com/HDP/centos6/2.x/BUILDS/2.1.1.0-118",
-          ri.getLatestBaseUrl());
-      Assert.assertEquals(
           "Expected the default URL to be the same as in the xml file",
           "http://public-repo-1.hortonworks.com/HDP/centos6/2.x/updates/2.0.6.0",
           ri.getDefaultBaseUrl());
     }
-
-    for (RepositoryInfo ri : metaInfo.getRepositories("HDP", "2.1.1", "suse11")) {
-      Assert.assertEquals(
-          "Expected hdp.json to be stripped from the url",
-          "http://s3.amazonaws.com/dev.hortonworks.com/HDP/suse11/2.x/BUILDS/2.1.1.0-118",
-          ri.getLatestBaseUrl());
-    }
-
-    for (RepositoryInfo ri : metaInfo.getRepositories("HDP", "2.1.1", "sles11")) {
-      Assert.assertEquals("http://s3.amazonaws.com/dev.hortonworks.com/HDP/suse11/2.x/BUILDS/2.1.1.0-118",
-          ri.getLatestBaseUrl());
-    }
   }
 
   @Test
@@ -1844,6 +1664,9 @@ public class AmbariMetaInfoTest {
    */
   @Test
   public void testAlertDefinitionMerging() throws Exception {
+    final String stackVersion = "2.0.6";
+    final String repoVersion = "2.0.6-1234";
+
     Injector injector = Guice.createInjector(Modules.override(
         new InMemoryDefaultTestModule()).with(new MockModule()));
 
@@ -1851,8 +1674,9 @@ public class AmbariMetaInfoTest {
 
     injector.getInstance(GuiceJpaInitializer.class);
     injector.getInstance(EntityManager.class);
-    long clusterId = injector.getInstance(OrmTestHelper.class).createCluster(
-        "cluster" + System.currentTimeMillis());
+
+    OrmTestHelper ormHelper = injector.getInstance(OrmTestHelper.class);
+    long clusterId = ormHelper.createCluster("cluster" + System.currentTimeMillis());
 
     Class<?> c = metaInfo.getClass().getSuperclass();
 
@@ -1867,15 +1691,18 @@ public class AmbariMetaInfoTest {
     Clusters clusters = injector.getInstance(Clusters.class);
     Cluster cluster = clusters.getClusterById(clusterId);
     cluster.setDesiredStackVersion(
-        new StackId(STACK_NAME_HDP, "2.0.6"));
+        new StackId(STACK_NAME_HDP, stackVersion));
 
-    cluster.addService("HDFS");
+    RepositoryVersionEntity repositoryVersion = ormHelper.getOrCreateRepositoryVersion(
+        cluster.getCurrentStackVersion(), repoVersion);
+
+    cluster.addService("HDFS", repositoryVersion);
 
     metaInfo.reconcileAlertDefinitions(clusters);
 
     AlertDefinitionDAO dao = injector.getInstance(AlertDefinitionDAO.class);
     List<AlertDefinitionEntity> definitions = dao.findAll(clusterId);
-    assertEquals(12, definitions.size());
+    assertEquals(13, definitions.size());
 
     // figure out how many of these alerts were merged into from the
     // non-stack alerts.json
@@ -1887,7 +1714,7 @@ public class AmbariMetaInfoTest {
       }
     }
 
-    assertEquals(2, hostAlertCount);
+    assertEquals(3, hostAlertCount);
     assertEquals(10, definitions.size() - hostAlertCount);
 
     for (AlertDefinitionEntity definition : definitions) {
@@ -1898,7 +1725,7 @@ public class AmbariMetaInfoTest {
     metaInfo.reconcileAlertDefinitions(clusters);
 
     definitions = dao.findAll();
-    assertEquals(12, definitions.size());
+    assertEquals(13, definitions.size());
 
     for (AlertDefinitionEntity definition : definitions) {
       assertEquals(28, definition.getScheduleInterval().intValue());
@@ -1907,7 +1734,7 @@ public class AmbariMetaInfoTest {
     // find all enabled for the cluster should find 6 (the ones from HDFS;
     // it will not find the agent alert since it's not bound to the cluster)
     definitions = dao.findAllEnabled(cluster.getClusterId());
-    assertEquals(11, definitions.size());
+    assertEquals(12, definitions.size());
 
     // create new definition
     AlertDefinitionEntity entity = new AlertDefinitionEntity();
@@ -1926,19 +1753,19 @@ public class AmbariMetaInfoTest {
 
     // verify the new definition is found (6 HDFS + 1 new one)
     definitions = dao.findAllEnabled(cluster.getClusterId());
-    assertEquals(12, definitions.size());
+    assertEquals(13, definitions.size());
 
     // reconcile, which should disable our bad definition
     metaInfo.reconcileAlertDefinitions(clusters);
 
     // find all enabled for the cluster should find 6
     definitions = dao.findAllEnabled(cluster.getClusterId());
-    assertEquals(11, definitions.size());
+    assertEquals(12, definitions.size());
 
     // find all should find 6 HDFS + 1 disabled + 1 agent alert + 2 server
     // alerts
     definitions = dao.findAll();
-    assertEquals(13, definitions.size());
+    assertEquals(14, definitions.size());
 
     entity = dao.findById(entity.getDefinitionId());
     assertFalse(entity.getEnabled());
@@ -2177,4 +2004,4 @@ public class AmbariMetaInfoTest {
       }
     }
   }
-}
+}
\ No newline at end of file
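
Taken together, the AmbariMetaInfoTest hunks track two trunk-merge changes: the latestBaseUrl machinery and its MetainfoEntity-backed persistence are gone (so a repository's baseUrl now defaults to its defaultBaseUrl), and services are now registered against an explicit repository version, which also bumps the expected alert-definition counts from 12 to 13. A minimal sketch of the new registration pattern, assuming the Guice injector and OrmTestHelper fixtures shown in the hunks above:

    // Hedged sketch, not part of the commit; all calls appear in the hunks above.
    private void addHdfsWithRepo(Injector injector) throws Exception {
      OrmTestHelper ormHelper = injector.getInstance(OrmTestHelper.class);
      long clusterId = ormHelper.createCluster("cluster" + System.currentTimeMillis());

      Cluster cluster = injector.getInstance(Clusters.class).getClusterById(clusterId);
      cluster.setDesiredStackVersion(new StackId("HDP", "2.0.6"));

      // addService now requires the repository version the service installs from
      RepositoryVersionEntity repositoryVersion = ormHelper.getOrCreateRepositoryVersion(
          cluster.getCurrentStackVersion(), "2.0.6-1234");
      cluster.addService("HDFS", repositoryVersion);
    }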

http://git-wip-us.apache.org/repos/asf/ambari/blob/56362fd6/ambari-server/src/test/java/org/apache/ambari/server/checks/AbstractCheckDescriptorTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/checks/AbstractCheckDescriptorTest.java b/ambari-server/src/test/java/org/apache/ambari/server/checks/AbstractCheckDescriptorTest.java
index dac4cae..a96ca6c 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/checks/AbstractCheckDescriptorTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/checks/AbstractCheckDescriptorTest.java
@@ -17,44 +17,31 @@
  */
 package org.apache.ambari.server.checks;
 
+import static org.easymock.EasyMock.anyString;
+import static org.easymock.EasyMock.expect;
+import static org.easymock.EasyMock.replay;
+
 import java.util.ArrayList;
-import java.util.Arrays;
 import java.util.HashMap;
 import java.util.LinkedHashSet;
 import java.util.List;
 import java.util.Map;
 
-import junit.framework.Assert;
-import static org.easymock.EasyMock.anyString;
-import static org.easymock.EasyMock.expect;
-import static org.easymock.EasyMock.expectLastCall;
-import static org.easymock.EasyMock.replay;
-
 import org.apache.ambari.server.AmbariException;
-import org.apache.ambari.server.api.services.AmbariMetaInfo;
-import org.apache.ambari.server.configuration.Configuration;
 import org.apache.ambari.server.controller.PrereqCheckRequest;
-import org.apache.ambari.server.orm.dao.ClusterVersionDAO;
-import org.apache.ambari.server.orm.dao.HostVersionDAO;
-import org.apache.ambari.server.orm.dao.RepositoryVersionDAO;
-import org.apache.ambari.server.orm.dao.UpgradeDAO;
 import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.Clusters;
 import org.apache.ambari.server.state.Service;
-import org.apache.ambari.server.state.ServiceImpl;
 import org.apache.ambari.server.state.stack.PrereqCheckType;
 import org.apache.ambari.server.state.stack.PrerequisiteCheck;
-import org.apache.ambari.server.state.stack.upgrade.RepositoryVersionHelper;
 import org.apache.ambari.server.state.stack.upgrade.UpgradeType;
-import org.apache.commons.collections.map.HashedMap;
 import org.easymock.EasyMock;
 import org.junit.Test;
 
-import com.google.inject.AbstractModule;
-import com.google.inject.Guice;
-import com.google.inject.Injector;
 import com.google.inject.Provider;
 
+import junit.framework.Assert;
+
 /**
  * Unit tests for AbstractCheckDescriptor
  */
@@ -141,7 +128,7 @@ public class AbstractCheckDescriptorTest {
 
     Assert.assertEquals("", check.formatEntityList(null));
 
-    final LinkedHashSet<String> failedOn = new LinkedHashSet<String>();
+    final LinkedHashSet<String> failedOn = new LinkedHashSet<>();
     Assert.assertEquals("", check.formatEntityList(failedOn));
 
     failedOn.add("host1");

http://git-wip-us.apache.org/repos/asf/ambari/blob/56362fd6/ambari-server/src/test/java/org/apache/ambari/server/checks/MapReduce2JobHistoryStatePreservingCheckTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/checks/MapReduce2JobHistoryStatePreservingCheckTest.java b/ambari-server/src/test/java/org/apache/ambari/server/checks/MapReduce2JobHistoryStatePreservingCheckTest.java
index bfe0c3e..efcf3de 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/checks/MapReduce2JobHistoryStatePreservingCheckTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/checks/MapReduce2JobHistoryStatePreservingCheckTest.java
@@ -17,33 +17,35 @@
  */
 package org.apache.ambari.server.checks;
 
-import com.google.inject.Provider;
+import java.util.HashMap;
+import java.util.Map;
+
 import org.apache.ambari.server.configuration.Configuration;
 import org.apache.ambari.server.controller.PrereqCheckRequest;
-import org.apache.ambari.server.orm.entities.ClusterVersionEntity;
+import org.apache.ambari.server.orm.dao.RepositoryVersionDAO;
 import org.apache.ambari.server.orm.entities.RepositoryVersionEntity;
 import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.Clusters;
 import org.apache.ambari.server.state.Config;
 import org.apache.ambari.server.state.DesiredConfig;
+import org.apache.ambari.server.state.RepositoryType;
 import org.apache.ambari.server.state.Service;
 import org.apache.ambari.server.state.StackId;
 import org.apache.ambari.server.state.stack.PrereqCheckStatus;
 import org.apache.ambari.server.state.stack.PrerequisiteCheck;
-import org.apache.ambari.server.state.stack.UpgradePack.PrerequisiteCheckConfig;
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
 import org.mockito.Mockito;
 
-import java.util.HashMap;
-import java.util.Map;
+import com.google.inject.Provider;
 
 /**
  * Tests for {@link org.apache.ambari.server.checks.MapReduce2JobHistoryStatePreservingCheck}
  */
 public class MapReduce2JobHistoryStatePreservingCheckTest {
   private final Clusters m_clusters = Mockito.mock(Clusters.class);
+  private final RepositoryVersionDAO m_repositoryVersionDao = Mockito.mock(RepositoryVersionDAO.class);
 
   private final MapReduce2JobHistoryStatePreservingCheck m_check = new MapReduce2JobHistoryStatePreservingCheck();
 
@@ -59,8 +61,20 @@ public class MapReduce2JobHistoryStatePreservingCheckTest {
         return m_clusters;
       }
     };
+
+    m_check.repositoryVersionDaoProvider = new Provider<RepositoryVersionDAO>() {
+      @Override
+      public RepositoryVersionDAO get() {
+        return m_repositoryVersionDao;
+      }
+    };
+
     Configuration config = Mockito.mock(Configuration.class);
     m_check.config = config;
+
+    RepositoryVersionEntity rve = Mockito.mock(RepositoryVersionEntity.class);
+    Mockito.when(rve.getType()).thenReturn(RepositoryType.STANDARD);
+    Mockito.when(m_repositoryVersionDao.findByStackNameAndVersion(Mockito.anyString(), Mockito.anyString())).thenReturn(rve);
   }
 
   /**
@@ -73,12 +87,9 @@ public class MapReduce2JobHistoryStatePreservingCheckTest {
     Mockito.when(m_clusters.getCluster("cluster")).thenReturn(cluster);
     Mockito.when(cluster.getCurrentStackVersion()).thenReturn(new StackId("HDP-2.3"));
 
-    Map<String, Service> services = new HashMap<String, Service>();
+    Map<String, Service> services = new HashMap<>();
     Mockito.when(cluster.getServices()).thenReturn(services);
 
-    ClusterVersionEntity clusterVersionEntity = Mockito.mock(ClusterVersionEntity.class);
-    Mockito.when(cluster.getCurrentClusterVersion()).thenReturn(clusterVersionEntity);
-
     PrereqCheckRequest request = new PrereqCheckRequest("cluster");
     request.setTargetStackId(new StackId("HDP", "2.3.1.1"));
     request.setSourceStackId(new StackId("HDP", "2.3.0.0"));
@@ -99,14 +110,14 @@ public class MapReduce2JobHistoryStatePreservingCheckTest {
 
     final DesiredConfig desiredConfig = Mockito.mock(DesiredConfig.class);
     Mockito.when(desiredConfig.getTag()).thenReturn("tag");
-    Map<String, DesiredConfig> configMap = new HashMap<String, DesiredConfig>();
+    Map<String, DesiredConfig> configMap = new HashMap<>();
     configMap.put("mapred-site", desiredConfig);
     configMap.put("yarn-site", desiredConfig);
 
     Mockito.when(cluster.getDesiredConfigs()).thenReturn(configMap);
     final Config config = Mockito.mock(Config.class);
     Mockito.when(cluster.getConfig(Mockito.anyString(), Mockito.anyString())).thenReturn(config);
-    final Map<String, String> properties = new HashMap<String, String>();
+    final Map<String, String> properties = new HashMap<>();
     Mockito.when(config.getProperties()).thenReturn(properties);
 
     PrerequisiteCheck check = new PrerequisiteCheck(null, null);
@@ -141,10 +152,7 @@ public class MapReduce2JobHistoryStatePreservingCheckTest {
       }
     });
     Mockito.when(cluster.getCurrentStackVersion()).thenReturn(new StackId("MYSTACK-12.2"));
-    ClusterVersionEntity clusterVersionEntity = Mockito.mock(ClusterVersionEntity.class);
-    Mockito.when(cluster.getCurrentClusterVersion()).thenReturn(clusterVersionEntity);
     RepositoryVersionEntity repositoryVersionEntity = Mockito.mock(RepositoryVersionEntity.class);
-    Mockito.when(clusterVersionEntity.getRepositoryVersion()).thenReturn(repositoryVersionEntity);
     Mockito.when(m_clusters.getCluster("c1")).thenReturn(cluster);
     PrereqCheckRequest request = new PrereqCheckRequest("c1");
 
@@ -152,4 +160,4 @@ public class MapReduce2JobHistoryStatePreservingCheckTest {
     boolean isApplicable = m_check.isApplicable(request);
     Assert.assertTrue(isApplicable);
   }
-}
+}
\ No newline at end of file
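
The pre-upgrade check no longer walks Cluster -> ClusterVersionEntity -> RepositoryVersionEntity; it resolves the repository version through an injected RepositoryVersionDAO provider instead. A sketch of the stubbing pattern these hunks introduce, assuming Mockito and the field names shown above:

    final RepositoryVersionDAO dao = Mockito.mock(RepositoryVersionDAO.class);
    m_check.repositoryVersionDaoProvider = new Provider<RepositoryVersionDAO>() {
      @Override
      public RepositoryVersionDAO get() {
        return dao;
      }
    };

    // Any stack name/version pair resolves to a STANDARD repository in this test.
    RepositoryVersionEntity rve = Mockito.mock(RepositoryVersionEntity.class);
    Mockito.when(rve.getType()).thenReturn(RepositoryType.STANDARD);
    Mockito.when(dao.findByStackNameAndVersion(Mockito.anyString(), Mockito.anyString()))
        .thenReturn(rve);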

http://git-wip-us.apache.org/repos/asf/ambari/blob/56362fd6/ambari-server/src/test/java/org/apache/ambari/server/checks/RangerAuditDbCheckTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/checks/RangerAuditDbCheckTest.java b/ambari-server/src/test/java/org/apache/ambari/server/checks/RangerAuditDbCheckTest.java
index 94284ac..e3e14d5 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/checks/RangerAuditDbCheckTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/checks/RangerAuditDbCheckTest.java
@@ -22,17 +22,13 @@ import java.util.Map;
 
 import org.apache.ambari.server.configuration.Configuration;
 import org.apache.ambari.server.controller.PrereqCheckRequest;
-import org.apache.ambari.server.orm.entities.ClusterVersionEntity;
-import org.apache.ambari.server.orm.entities.RepositoryVersionEntity;
 import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.Clusters;
 import org.apache.ambari.server.state.Config;
 import org.apache.ambari.server.state.DesiredConfig;
 import org.apache.ambari.server.state.Service;
-import org.apache.ambari.server.state.StackId;
 import org.apache.ambari.server.state.stack.PrereqCheckStatus;
 import org.apache.ambari.server.state.stack.PrerequisiteCheck;
-import org.apache.ambari.server.state.stack.UpgradePack.PrerequisiteCheckConfig;
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
@@ -91,13 +87,13 @@ public class RangerAuditDbCheckTest {
 
     final DesiredConfig desiredConfig = Mockito.mock(DesiredConfig.class);
     Mockito.when(desiredConfig.getTag()).thenReturn("tag");
-    Map<String, DesiredConfig> configMap = new HashMap<String, DesiredConfig>();
+    Map<String, DesiredConfig> configMap = new HashMap<>();
     configMap.put("ranger-admin-site", desiredConfig);
 
     Mockito.when(cluster.getDesiredConfigs()).thenReturn(configMap);
     final Config config = Mockito.mock(Config.class);
     Mockito.when(cluster.getConfig(Mockito.anyString(), Mockito.anyString())).thenReturn(config);
-    final Map<String, String> properties = new HashMap<String, String>();
+    final Map<String, String> properties = new HashMap<>();
     Mockito.when(config.getProperties()).thenReturn(properties);
 
     properties.put("ranger.audit.source.type", "db");

http://git-wip-us.apache.org/repos/asf/ambari/blob/56362fd6/ambari-server/src/test/java/org/apache/ambari/server/configuration/RecoveryConfigHelperTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/configuration/RecoveryConfigHelperTest.java b/ambari-server/src/test/java/org/apache/ambari/server/configuration/RecoveryConfigHelperTest.java
index 3a93fbf..83f9f2f 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/configuration/RecoveryConfigHelperTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/configuration/RecoveryConfigHelperTest.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -36,19 +36,22 @@ import org.apache.ambari.server.H2DatabaseCleaner;
 import org.apache.ambari.server.agent.HeartbeatTestHelper;
 import org.apache.ambari.server.agent.RecoveryConfig;
 import org.apache.ambari.server.agent.RecoveryConfigHelper;
-import org.apache.ambari.server.events.publishers.AmbariEventPublisher;
 import org.apache.ambari.server.orm.GuiceJpaInitializer;
 import org.apache.ambari.server.orm.InMemoryDefaultTestModule;
+import org.apache.ambari.server.orm.OrmTestHelper;
+import org.apache.ambari.server.orm.entities.RepositoryVersionEntity;
 import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.Config;
 import org.apache.ambari.server.state.MaintenanceState;
 import org.apache.ambari.server.state.Service;
+import org.apache.ambari.server.state.StackId;
 import org.apache.ambari.server.state.State;
 import org.apache.ambari.server.utils.EventBusSynchronizer;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
 
+import com.google.common.collect.Sets;
 import com.google.common.eventbus.EventBus;
 import com.google.inject.Guice;
 import com.google.inject.Inject;
@@ -69,7 +72,11 @@ public class RecoveryConfigHelperTest {
   private RecoveryConfigHelper recoveryConfigHelper;
 
   @Inject
-  private AmbariEventPublisher eventPublisher;
+  private OrmTestHelper helper;
+
+  private final String STACK_VERSION = "0.1";
+  private final String REPO_VERSION = "0.1-1234";
+  private final StackId stackId = new StackId("HDP", STACK_VERSION);
 
   @Before
   public void setup() throws Exception {
@@ -114,7 +121,7 @@ public class RecoveryConfigHelperTest {
   public void testRecoveryConfigValues()
       throws Exception {
     String hostname = "hostname1";
-    Cluster cluster = getDummyCluster(hostname);
+    Cluster cluster = getDummyCluster(Sets.newHashSet(hostname));
     RecoveryConfig recoveryConfig = recoveryConfigHelper.getRecoveryConfig(cluster.getClusterName(), hostname);
     assertEquals(recoveryConfig.getMaxLifetimeCount(), "10");
     assertEquals(recoveryConfig.getMaxCount(), "4");
@@ -134,7 +141,9 @@ public class RecoveryConfigHelperTest {
   public void testServiceComponentInstalled()
       throws Exception {
     Cluster cluster = heartbeatTestHelper.getDummyCluster();
-    Service hdfs = cluster.addService(HDFS);
+
+    RepositoryVersionEntity repositoryVersion = helper.getOrCreateRepositoryVersion(cluster);
+    Service hdfs = cluster.addService(HDFS, repositoryVersion);
 
     hdfs.addServiceComponent(DATANODE).setRecoveryEnabled(true);
     hdfs.getServiceComponent(DATANODE).addServiceComponentHost(DummyHostname1);
@@ -167,7 +176,8 @@ public class RecoveryConfigHelperTest {
   public void testServiceComponentUninstalled()
       throws Exception {
     Cluster cluster = heartbeatTestHelper.getDummyCluster();
-    Service hdfs = cluster.addService(HDFS);
+    RepositoryVersionEntity repositoryVersion = helper.getOrCreateRepositoryVersion(cluster);
+    Service hdfs = cluster.addService(HDFS, repositoryVersion);
 
     hdfs.addServiceComponent(DATANODE).setRecoveryEnabled(true);
     hdfs.getServiceComponent(DATANODE).addServiceComponentHost(DummyHostname1);
@@ -202,7 +212,8 @@ public class RecoveryConfigHelperTest {
   public void testClusterEnvConfigChanged()
       throws Exception {
     Cluster cluster = heartbeatTestHelper.getDummyCluster();
-    Service hdfs = cluster.addService(HDFS);
+    RepositoryVersionEntity repositoryVersion = helper.getOrCreateRepositoryVersion(cluster);
+    Service hdfs = cluster.addService(HDFS, repositoryVersion);
 
     hdfs.addServiceComponent(DATANODE).setRecoveryEnabled(true);
     hdfs.getServiceComponent(DATANODE).addServiceComponentHost(DummyHostname1);
@@ -241,7 +252,8 @@ public class RecoveryConfigHelperTest {
   public void testMaintenanceModeChanged()
       throws Exception {
     Cluster cluster = heartbeatTestHelper.getDummyCluster();
-    Service hdfs = cluster.addService(HDFS);
+    RepositoryVersionEntity repositoryVersion = helper.getOrCreateRepositoryVersion(cluster);
+    Service hdfs = cluster.addService(HDFS, repositoryVersion);
 
     hdfs.addServiceComponent(DATANODE).setRecoveryEnabled(true);
     hdfs.getServiceComponent(DATANODE).addServiceComponentHost(DummyHostname1);
@@ -275,7 +287,8 @@ public class RecoveryConfigHelperTest {
   public void testServiceComponentRecoveryChanged()
       throws Exception {
     Cluster cluster = heartbeatTestHelper.getDummyCluster();
-    Service hdfs = cluster.addService(HDFS);
+    RepositoryVersionEntity repositoryVersion = helper.getOrCreateRepositoryVersion(cluster);
+    Service hdfs = cluster.addService(HDFS, repositoryVersion);
 
     hdfs.addServiceComponent(DATANODE).setRecoveryEnabled(true);
     hdfs.getServiceComponent(DATANODE).addServiceComponentHost(DummyHostname1);
@@ -315,8 +328,10 @@ public class RecoveryConfigHelperTest {
     // Create a cluster with 2 hosts
     Cluster cluster = getDummyCluster(hostNames);
 
+    RepositoryVersionEntity repositoryVersion = helper.getOrCreateRepositoryVersion(cluster);
+
     // Add HDFS service with DATANODE component to the cluster
-    Service hdfs = cluster.addService(HDFS);
+    Service hdfs = cluster.addService(HDFS, repositoryVersion);
 
     hdfs.addServiceComponent(DATANODE).setRecoveryEnabled(true);
 
@@ -340,6 +355,7 @@ public class RecoveryConfigHelperTest {
 
   private Cluster getDummyCluster(Set<String> hostNames)
       throws Exception {
+
     Map<String, String> configProperties = new HashMap<String, String>() {{
       put(RecoveryConfigHelper.RECOVERY_ENABLED_KEY, "true");
       put(RecoveryConfigHelper.RECOVERY_TYPE_KEY, "AUTO_START");
@@ -349,16 +365,9 @@ public class RecoveryConfigHelperTest {
       put(RecoveryConfigHelper.RECOVERY_RETRY_GAP_KEY, "2");
     }};
 
-    return heartbeatTestHelper.getDummyCluster("cluster1", "HDP-0.1", configProperties, hostNames);
-  }
-
-  private Cluster getDummyCluster(final String hostname)
-      throws Exception {
-
-    Set<String> hostNames = new HashSet<String>(){{
-      add(hostname);
-    }};
+    Cluster cluster = heartbeatTestHelper.getDummyCluster("cluster1", stackId, REPO_VERSION,
+        configProperties, hostNames);
 
-    return getDummyCluster(hostNames);
+    return cluster;
   }
-}
+}
\ No newline at end of file
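
RecoveryConfigHelperTest picks up the same two changes: addService takes a RepositoryVersionEntity, and the single-hostname getDummyCluster overload is folded into the Set-based one, with the stack passed as a StackId plus repo version string instead of the old "HDP-0.1" literal. An illustrative fragment, reusing the names from the hunks above:

    // Single-host callers build the set inline with Guava:
    Cluster cluster = getDummyCluster(Sets.newHashSet("hostname1"));

    // ...and the helper now takes an explicit StackId and repo version:
    heartbeatTestHelper.getDummyCluster("cluster1", new StackId("HDP", "0.1"),
        "0.1-1234", configProperties, hostNames);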

http://git-wip-us.apache.org/repos/asf/ambari/blob/56362fd6/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelperTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelperTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelperTest.java
index 66a6e98..1af8321 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelperTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelperTest.java
@@ -552,7 +552,7 @@ public class AmbariCustomCommandExecutionHelperTest {
     EasyMock.replay(hostRoleCommand, actionManager, configHelper);
 
     ambariManagementController.createAction(actionRequest, requestProperties);
-
+    StackId stackId = clusters.getCluster("c1").getDesiredStackVersion();
     Request request = requestCapture.getValue();
     Stage stage = request.getStages().iterator().next();
     List<ExecutionCommandWrapper> commands = stage.getExecutionCommands("c1-c6401");
@@ -695,7 +695,7 @@ public class AmbariCustomCommandExecutionHelperTest {
       RepositoryVersionEntity repositoryVersion) throws AmbariException, AuthorizationException {
 
     ServiceRequest r1 = new ServiceRequest(clusterName, serviceName,
-        repositoryVersion.getId(), null, "false");
+        repositoryVersion.getStackId().getStackId(), repositoryVersion.getVersion(), null, "false");
 
     Set<ServiceRequest> requests = new HashSet<>();
     requests.add(r1);
@@ -733,4 +733,4 @@ public class AmbariCustomCommandExecutionHelperTest {
     ambariManagementController.createHostComponents(requests);
   }
 
-}
+}
\ No newline at end of file
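
The ServiceRequest change follows the same theme: the request now carries the stack id and version strings rather than the repository version's database id. A sketch of the new shape, with illustrative values in the comments:

    ServiceRequest r1 = new ServiceRequest(clusterName, serviceName,
        repositoryVersion.getStackId().getStackId(), // stack id string, e.g. "HDP-2.0.6"
        repositoryVersion.getVersion(),              // repo version, e.g. "2.0.6.0-1234"
        null, "false");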