Posted to commits@ambari.apache.org by ds...@apache.org on 2015/08/19 23:25:45 UTC

ambari git commit: AMBARI-12801 Removing hive.heapsize breaks template generation (dsen)

Repository: ambari
Updated Branches:
  refs/heads/trunk bcbb0ae41 -> c55b4a61d


AMBARI-12801 Removing hive.heapsize breaks template generation (dsen)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/c55b4a61
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/c55b4a61
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/c55b4a61

Branch: refs/heads/trunk
Commit: c55b4a61d3806d91be96f787902237eeff9a5e18
Parents: bcbb0ae
Author: Dmytro Sen <ds...@apache.org>
Authored: Thu Aug 20 00:25:33 2015 +0300
Committer: Dmytro Sen <ds...@apache.org>
Committed: Thu Aug 20 00:25:33 2015 +0300

----------------------------------------------------------------------
 .../server/upgrade/SchemaUpgradeHelper.java     |   1 +
 .../server/upgrade/UpgradeCatalog212.java       | 176 ++++++++++++
 .../0.12.0.2.0/package/scripts/params_linux.py  |  11 +-
 .../server/upgrade/UpgradeCatalog212Test.java   | 272 +++++++++++++++++++
 4 files changed, 455 insertions(+), 5 deletions(-)
----------------------------------------------------------------------
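
In short: on HDP 2.2 and later hive.heapsize is no longer part of hive-site, so the
new UpgradeCatalog212 removes it (along with three other obsolete properties) from
hive-site and strips the matching HADOOP_HEAPSIZE / HADOOP_CLIENT_OPTS exports from
the hive-env template content, while params_linux.py now reads the heapsize
properties only on stacks below 2.2. Short illustrative sketches follow the
relevant diffs below.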


http://git-wip-us.apache.org/repos/asf/ambari/blob/c55b4a61/ambari-server/src/main/java/org/apache/ambari/server/upgrade/SchemaUpgradeHelper.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/SchemaUpgradeHelper.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/SchemaUpgradeHelper.java
index 77e2e93..2fa4275 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/SchemaUpgradeHelper.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/SchemaUpgradeHelper.java
@@ -178,6 +178,7 @@ public class SchemaUpgradeHelper {
       catalogBinder.addBinding().to(UpgradeCatalog200.class);
       catalogBinder.addBinding().to(UpgradeCatalog210.class);
       catalogBinder.addBinding().to(UpgradeCatalog211.class);
+      catalogBinder.addBinding().to(UpgradeCatalog212.class);
       catalogBinder.addBinding().to(UpgradeCatalog220.class);
       catalogBinder.addBinding().to(FinalUpgradeCatalog.class);
     }

http://git-wip-us.apache.org/repos/asf/ambari/blob/c55b4a61/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog212.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog212.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog212.java
new file mode 100644
index 0000000..6f20423
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog212.java
@@ -0,0 +1,176 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.upgrade;
+
+import com.google.inject.Inject;
+import com.google.inject.Injector;
+import org.apache.ambari.server.AmbariException;
+import org.apache.ambari.server.configuration.Configuration.DatabaseType;
+import org.apache.ambari.server.controller.AmbariManagementController;
+import org.apache.ambari.server.orm.DBAccessor.DBColumnInfo;
+import org.apache.ambari.server.orm.dao.DaoUtils;
+import org.apache.ambari.server.state.Cluster;
+import org.apache.ambari.server.state.Clusters;
+import org.apache.ambari.server.state.Config;
+import org.apache.ambari.server.state.StackId;
+import org.apache.ambari.server.utils.VersionUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.jdbc.support.JdbcUtils;
+
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.text.MessageFormat;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.atomic.AtomicLong;
+import java.util.regex.Matcher;
+
+
+/**
+ * Upgrade catalog for version 2.1.2.
+ */
+public class UpgradeCatalog212 extends AbstractUpgradeCatalog {
+  private static final String HIVE_SITE = "hive-site";
+  private static final String HIVE_ENV = "hive-env";
+
+  /**
+   * Logger.
+   */
+  private static final Logger LOG = LoggerFactory.getLogger(UpgradeCatalog212.class);
+
+  @Inject
+  DaoUtils daoUtils;
+
+  // ----- Constructors ------------------------------------------------------
+
+  /**
+   * Don't forget to register new UpgradeCatalogs in {@link org.apache.ambari.server.upgrade.SchemaUpgradeHelper.UpgradeHelperModule#configure()}
+   *
+   * @param injector Guice injector that tracks dependencies and uses bindings to inject them.
+   */
+  @Inject
+  public UpgradeCatalog212(Injector injector) {
+    super(injector);
+
+    daoUtils = injector.getInstance(DaoUtils.class);
+  }
+
+  // ----- UpgradeCatalog ----------------------------------------------------
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  public String getTargetVersion() {
+    return "2.1.2";
+  }
+
+  // ----- AbstractUpgradeCatalog --------------------------------------------
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  public String getSourceVersion() {
+    return "2.1.1";
+  }
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  protected void executeDDLUpdates() throws AmbariException, SQLException {
+  }
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  protected void executePreDMLUpdates() throws AmbariException, SQLException {
+  }
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  protected void executeDMLUpdates() throws AmbariException, SQLException {
+    addNewConfigurationsFromXml();
+    addMissingConfigs();
+  }
+
+  protected void addMissingConfigs() throws AmbariException {
+    updateHiveConfigs();
+  }
+
+  protected void updateHiveConfigs() throws AmbariException {
+    AmbariManagementController ambariManagementController = injector.getInstance(
+            AmbariManagementController.class);
+    Clusters clusters = ambariManagementController.getClusters();
+
+    if (clusters != null) {
+      Map<String, Cluster> clusterMap = clusters.getClusters();
+
+      if (clusterMap != null && !clusterMap.isEmpty()) {
+        for (final Cluster cluster : clusterMap.values()) {
+          String content = null;
+          boolean isHiveSitePresent = cluster.getDesiredConfigByType(HIVE_SITE) != null;
+          StackId stackId = cluster.getCurrentStackVersion();
+          boolean isStackNotLess22 = (stackId != null && stackId.getStackName().equals("HDP") &&
+                  VersionUtils.compareVersions(stackId.getStackVersion(), "2.2") >= 0);
+
+          if (cluster.getDesiredConfigByType(HIVE_ENV) != null && isStackNotLess22) {
+            Map<String, String> hiveEnvProps = new HashMap<String, String>();
+            content = cluster.getDesiredConfigByType(HIVE_ENV).getProperties().get("content");
+            if (content != null) {
+              content = updateHiveEnvContent(content);
+              hiveEnvProps.put("content", content);
+            }
+            updateConfigurationPropertiesForCluster(cluster, HIVE_ENV, hiveEnvProps, true, true);
+          }
+
+          if (isHiveSitePresent && isStackNotLess22) {
+            Set<String> hiveSiteRemoveProps = new HashSet<String>();
+            hiveSiteRemoveProps.add("hive.heapsize");
+            hiveSiteRemoveProps.add("hive.optimize.mapjoin.mapreduce");
+            hiveSiteRemoveProps.add("hive.server2.enable.impersonation");
+            hiveSiteRemoveProps.add("hive.auto.convert.sortmerge.join.noconditionaltask");
+
+            updateConfigurationPropertiesForCluster(cluster, HIVE_SITE, new HashMap<String, String>(), hiveSiteRemoveProps, false, true);
+          }
+        }
+      }
+    }
+  }
+
+  protected String updateHiveEnvContent(String hiveEnvContent) {
+    if (hiveEnvContent == null) {
+      return null;
+    }
+    String oldHeapSizeRegex = "export HADOOP_HEAPSIZE=\"\\{\\{hive_heapsize\\}\\}\"\\s*\\n" +
+            "export HADOOP_CLIENT_OPTS=\"-Xmx\\$\\{HADOOP_HEAPSIZE\\}m \\$HADOOP_CLIENT_OPTS\"";
+    // The matched export lines are dropped entirely; the replacement is empty.
+    return hiveEnvContent.replaceAll(oldHeapSizeRegex, Matcher.quoteReplacement(""));
+  }
+
+}
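
A minimal, self-contained sketch of what updateHiveEnvContent does to a hive-env
template; the class name HiveEnvContentDemo and the sample content are illustrative
only, mirroring the fixture used in UpgradeCatalog212Test below:

import java.util.regex.Matcher;

public class HiveEnvContentDemo {
  // Same pattern the catalog uses to find the obsolete heapsize exports.
  static final String OLD_HEAPSIZE_REGEX =
      "export HADOOP_HEAPSIZE=\"\\{\\{hive_heapsize\\}\\}\"\\s*\\n" +
      "export HADOOP_CLIENT_OPTS=\"-Xmx\\$\\{HADOOP_HEAPSIZE\\}m \\$HADOOP_CLIENT_OPTS\"";

  public static void main(String[] args) {
    String content =
        "export HADOOP_HEAPSIZE=\"{{hive_heapsize}}\"\n" +
        "export HADOOP_CLIENT_OPTS=\"-Xmx${HADOOP_HEAPSIZE}m $HADOOP_CLIENT_OPTS\"\n" +
        "HADOOP_HOME=${HADOOP_HOME:-{{hadoop_home}}}\n";
    // Both export lines are removed; the rest of the template is left untouched.
    System.out.print(content.replaceAll(OLD_HEAPSIZE_REGEX, Matcher.quoteReplacement("")));
    // Prints a blank line followed by:
    // HADOOP_HOME=${HADOOP_HOME:-{{hadoop_home}}}
  }
}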

http://git-wip-us.apache.org/repos/asf/ambari/blob/c55b4a61/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
index b78dd4b..63b3d7e 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
@@ -274,12 +274,13 @@ start_metastore_path = format("{tmp_dir}/start_metastore_script")
 
 hadoop_heapsize = config['configurations']['hadoop-env']['hadoop_heapsize']
 
-if 'role' in config and config['role'] in ["HIVE_SERVER", "HIVE_METASTORE"]:
-  hive_heapsize = config['configurations']['hive-site']['hive.heapsize']
-else:
-  hive_heapsize = config['configurations']['hive-env']['hive.client.heapsize']
+if Script.is_hdp_stack_less_than("2.2"):
+  if 'role' in config and config['role'] in ["HIVE_SERVER", "HIVE_METASTORE"]:
+    hive_heapsize = config['configurations']['hive-site']['hive.heapsize']
+  else:
+    hive_heapsize = config['configurations']['hive-env']['hive.client.heapsize']
 
-hive_metastore_heapsize = config['configurations']['hive-env']['hive.metastore.heapsize']
+  hive_metastore_heapsize = config['configurations']['hive-env']['hive.metastore.heapsize']
 
 java64_home = config['hostLevelParams']['java_home']
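
The stack gate is the same on both sides of this change: params_linux.py reads the
heapsize properties only below HDP 2.2, and the catalog strips them only at 2.2 or
later. A minimal sketch of that gate, with a hypothetical compareVersions helper
standing in for Ambari's VersionUtils.compareVersions (assumed behavior: numeric
comparison of dot-separated versions):

public class StackGateDemo {
  // Hypothetical stand-in for VersionUtils.compareVersions: returns <0, 0 or >0.
  static int compareVersions(String a, String b) {
    String[] pa = a.split("\\."), pb = b.split("\\.");
    for (int i = 0; i < Math.max(pa.length, pb.length); i++) {
      int va = i < pa.length ? Integer.parseInt(pa[i]) : 0;
      int vb = i < pb.length ? Integer.parseInt(pb[i]) : 0;
      if (va != vb) return Integer.compare(va, vb);
    }
    return 0;
  }

  public static void main(String[] args) {
    // HDP 2.2+ clusters get hive.heapsize removed; older stacks keep reading it.
    for (String stack : new String[] {"2.0.6", "2.1", "2.2", "2.3"}) {
      boolean isStackNotLess22 = compareVersions(stack, "2.2") >= 0;
      System.out.println("HDP " + stack + " -> strip hive.heapsize: " + isStackNotLess22);
    }
  }
}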
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/c55b4a61/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog212Test.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog212Test.java b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog212Test.java
new file mode 100644
index 0000000..fae8985
--- /dev/null
+++ b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog212Test.java
@@ -0,0 +1,272 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.upgrade;
+
+import static junit.framework.Assert.assertEquals;
+import static junit.framework.Assert.assertNotNull;
+import static junit.framework.Assert.assertNull;
+import static org.easymock.EasyMock.anyObject;
+import static org.easymock.EasyMock.capture;
+import static org.easymock.EasyMock.createMockBuilder;
+import static org.easymock.EasyMock.createNiceMock;
+import static org.easymock.EasyMock.createStrictMock;
+import static org.easymock.EasyMock.eq;
+import static org.easymock.EasyMock.expect;
+import static org.easymock.EasyMock.expectLastCall;
+import static org.easymock.EasyMock.replay;
+import static org.easymock.EasyMock.reset;
+import static org.easymock.EasyMock.verify;
+
+import java.io.File;
+import java.lang.reflect.Field;
+import java.lang.reflect.Method;
+import java.net.URL;
+import java.sql.Connection;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+
+import javax.persistence.EntityManager;
+
+import com.google.inject.AbstractModule;
+import org.apache.ambari.server.api.services.AmbariMetaInfo;
+import org.apache.ambari.server.configuration.Configuration;
+import org.apache.ambari.server.controller.AmbariManagementController;
+import org.apache.ambari.server.controller.ServiceConfigVersionResponse;
+import org.apache.ambari.server.orm.DBAccessor;
+import org.apache.ambari.server.orm.DBAccessor.DBColumnInfo;
+import org.apache.ambari.server.orm.GuiceJpaInitializer;
+import org.apache.ambari.server.orm.InMemoryDefaultTestModule;
+import org.apache.ambari.server.orm.dao.ArtifactDAO;
+import org.apache.ambari.server.orm.dao.ClusterDAO;
+import org.apache.ambari.server.orm.dao.ClusterStateDAO;
+import org.apache.ambari.server.orm.dao.HostComponentDesiredStateDAO;
+import org.apache.ambari.server.orm.dao.ServiceComponentDesiredStateDAO;
+import org.apache.ambari.server.orm.dao.StackDAO;
+import org.apache.ambari.server.orm.entities.ArtifactEntity;
+import org.apache.ambari.server.orm.entities.ClusterEntity;
+import org.apache.ambari.server.orm.entities.ClusterServiceEntity;
+import org.apache.ambari.server.orm.entities.ClusterStateEntity;
+import org.apache.ambari.server.orm.entities.HostComponentDesiredStateEntity;
+import org.apache.ambari.server.orm.entities.HostEntity;
+import org.apache.ambari.server.orm.entities.ServiceComponentDesiredStateEntity;
+import org.apache.ambari.server.orm.entities.ServiceComponentDesiredStateEntityPK;
+import org.apache.ambari.server.orm.entities.StackEntity;
+import org.apache.ambari.server.state.Cluster;
+import org.apache.ambari.server.state.Clusters;
+import org.apache.ambari.server.state.Config;
+import org.apache.ambari.server.state.ConfigHelper;
+import org.apache.ambari.server.state.Host;
+import org.apache.ambari.server.state.HostComponentAdminState;
+import org.apache.ambari.server.state.Service;
+import org.apache.ambari.server.state.StackId;
+import org.apache.ambari.server.state.kerberos.KerberosDescriptor;
+import org.apache.ambari.server.state.kerberos.KerberosDescriptorFactory;
+import org.apache.ambari.server.state.kerberos.KerberosServiceDescriptor;
+import org.apache.ambari.server.state.stack.OsFamily;
+import org.easymock.Capture;
+import org.easymock.EasyMockSupport;
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+
+import com.google.inject.Binder;
+import com.google.inject.Guice;
+import com.google.inject.Injector;
+import com.google.inject.Module;
+import com.google.inject.Provider;
+import com.google.inject.persist.PersistService;
+import org.powermock.api.easymock.PowerMock;
+
+/**
+ * {@link org.apache.ambari.server.upgrade.UpgradeCatalog212} unit tests.
+ */
+public class UpgradeCatalog212Test {
+  private Injector injector;
+  private Provider<EntityManager> entityManagerProvider = createStrictMock(Provider.class);
+  private EntityManager entityManager = createNiceMock(EntityManager.class);
+  private UpgradeCatalogHelper upgradeCatalogHelper;
+  private StackEntity desiredStackEntity;
+
+  @Before
+  public void init() {
+    reset(entityManagerProvider);
+    expect(entityManagerProvider.get()).andReturn(entityManager).anyTimes();
+    replay(entityManagerProvider);
+    injector = Guice.createInjector(new InMemoryDefaultTestModule());
+    injector.getInstance(GuiceJpaInitializer.class);
+
+    upgradeCatalogHelper = injector.getInstance(UpgradeCatalogHelper.class);
+    // inject AmbariMetaInfo to ensure that stacks get populated in the DB
+    injector.getInstance(AmbariMetaInfo.class);
+    // load the stack entity
+    StackDAO stackDAO = injector.getInstance(StackDAO.class);
+    desiredStackEntity = stackDAO.find("HDP", "2.2.0");
+  }
+
+  @After
+  public void tearDown() {
+    injector.getInstance(PersistService.class).stop();
+  }
+
+  @Test
+  public void testExecuteDMLUpdates() throws Exception {
+    Method addMissingConfigs = UpgradeCatalog212.class.getDeclaredMethod("addMissingConfigs");
+    Method addNewConfigurationsFromXml = AbstractUpgradeCatalog.class.getDeclaredMethod("addNewConfigurationsFromXml");
+
+    UpgradeCatalog212 upgradeCatalog212 = createMockBuilder(UpgradeCatalog212.class)
+            .addMockedMethod(addNewConfigurationsFromXml)
+            .addMockedMethod(addMissingConfigs)
+            .createMock();
+
+    upgradeCatalog212.addNewConfigurationsFromXml();
+    expectLastCall().once();
+
+    upgradeCatalog212.addMissingConfigs();
+    expectLastCall().once();
+
+    replay(upgradeCatalog212);
+
+    upgradeCatalog212.executeDMLUpdates();
+
+    verify(upgradeCatalog212);
+  }
+
+  @Test
+  public void testUpdateHiveConfigs() throws Exception {
+    EasyMockSupport easyMockSupport = new EasyMockSupport();
+    final AmbariManagementController  mockAmbariManagementController = easyMockSupport.createNiceMock(AmbariManagementController.class);
+    final ConfigHelper mockConfigHelper = easyMockSupport.createMock(ConfigHelper.class);
+
+    final Clusters mockClusters = easyMockSupport.createStrictMock(Clusters.class);
+    final Cluster mockClusterExpected = easyMockSupport.createNiceMock(Cluster.class);
+    final Config mockHiveEnv = easyMockSupport.createNiceMock(Config.class);
+    final Config mockHiveSite = easyMockSupport.createNiceMock(Config.class);
+
+    final Map<String, String> propertiesExpectedHiveEnv = new HashMap<String, String>();
+    final Map<String, String> propertiesExpectedHiveSite = new HashMap<String, String>() {{
+      put("hive.heapsize", "512");
+      put("hive.server2.custom.authentication.class", "");
+    }};
+
+    final Injector mockInjector = Guice.createInjector(new AbstractModule() {
+      @Override
+      protected void configure() {
+        bind(AmbariManagementController.class).toInstance(mockAmbariManagementController);
+        bind(ConfigHelper.class).toInstance(mockConfigHelper);
+        bind(Clusters.class).toInstance(mockClusters);
+
+        bind(DBAccessor.class).toInstance(createNiceMock(DBAccessor.class));
+        bind(OsFamily.class).toInstance(createNiceMock(OsFamily.class));
+      }
+    });
+
+    expect(mockAmbariManagementController.getClusters()).andReturn(mockClusters).once();
+    expect(mockClusters.getClusters()).andReturn(new HashMap<String, Cluster>() {{
+      put("normal", mockClusterExpected);
+    }}).once();
+
+    expect(mockClusterExpected.getDesiredConfigByType("hive-site")).andReturn(mockHiveSite).atLeastOnce();
+    expect(mockHiveSite.getProperties()).andReturn(propertiesExpectedHiveSite).atLeastOnce();
+
+    StackId stackId = new StackId("HDP-2.2");
+    expect(mockClusterExpected.getCurrentStackVersion()).andReturn(stackId).atLeastOnce();
+
+    easyMockSupport.replayAll();
+    mockInjector.getInstance(UpgradeCatalog212.class).updateHiveConfigs();
+    easyMockSupport.verifyAll();
+
+  }
+
+  @Test
+  public void testUpdateHiveEnvContent() throws Exception {
+    final Injector mockInjector = Guice.createInjector(new AbstractModule() {
+      @Override
+      protected void configure() {
+        bind(DBAccessor.class).toInstance(createNiceMock(DBAccessor.class));
+        bind(OsFamily.class).toInstance(createNiceMock(OsFamily.class));
+      }
+    });
+    String testContent = " if [ \"$SERVICE\" = \"cli\" ]; then\n" +
+            "   if [ -z \"$DEBUG\" ]; then\n" +
+            "     export HADOOP_OPTS=\"$HADOOP_OPTS -XX:NewRatio=12 -Xms10m -XX:MaxHeapFreeRatio=40 -XX:MinHeapFreeRatio=15 -XX:+UseParNewGC -XX:-UseGCOverheadLimit\"\n" +
+            "   else\n" +
+            "     export HADOOP_OPTS=\"$HADOOP_OPTS -XX:NewRatio=12 -Xms10m -XX:MaxHeapFreeRatio=40 -XX:MinHeapFreeRatio=15 -XX:-UseGCOverheadLimit\"\n" +
+            "   fi\n" +
+            " fi\n" +
+            "\n" +
+            "export HADOOP_HEAPSIZE=\"{{hive_heapsize}}\"\n" +
+            "export HADOOP_CLIENT_OPTS=\"-Xmx${HADOOP_HEAPSIZE}m $HADOOP_CLIENT_OPTS\"\n" +
+            "\n" +
+            "# Set HADOOP_HOME to point to a specific hadoop install directory\n" +
+            "HADOOP_HOME=${HADOOP_HOME:-{{hadoop_home}}}\n";
+    String expectedResult = " if [ \"$SERVICE\" = \"cli\" ]; then\n" +
+            "   if [ -z \"$DEBUG\" ]; then\n" +
+            "     export HADOOP_OPTS=\"$HADOOP_OPTS -XX:NewRatio=12 -Xms10m -XX:MaxHeapFreeRatio=40 -XX:MinHeapFreeRatio=15 -XX:+UseParNewGC -XX:-UseGCOverheadLimit\"\n" +
+            "   else\n" +
+            "     export HADOOP_OPTS=\"$HADOOP_OPTS -XX:NewRatio=12 -Xms10m -XX:MaxHeapFreeRatio=40 -XX:MinHeapFreeRatio=15 -XX:-UseGCOverheadLimit\"\n" +
+            "   fi\n" +
+            " fi\n" +
+            "\n" +
+            "\n" +
+            "\n" +
+            "# Set HADOOP_HOME to point to a specific hadoop install directory\n" +
+            "HADOOP_HOME=${HADOOP_HOME:-{{hadoop_home}}}\n";
+    assertEquals(expectedResult, mockInjector.getInstance(UpgradeCatalog212.class).updateHiveEnvContent(testContent));
+  }
+  /**
+   * @param dbAccessor mocked database accessor to bind into the test injector
+   * @return the upgrade catalog under test, created through a minimal Guice module
+   */
+  private AbstractUpgradeCatalog getUpgradeCatalog(final DBAccessor dbAccessor) {
+    Module module = new Module() {
+      @Override
+      public void configure(Binder binder) {
+        binder.bind(DBAccessor.class).toInstance(dbAccessor);
+        binder.bind(EntityManager.class).toInstance(entityManager);
+        binder.bind(OsFamily.class).toInstance(createNiceMock(OsFamily.class));
+      }
+    };
+
+    Injector injector = Guice.createInjector(module);
+    return injector.getInstance(UpgradeCatalog212.class);
+  }
+
+  @Test
+  public void testGetSourceVersion() {
+    final DBAccessor dbAccessor = createNiceMock(DBAccessor.class);
+    UpgradeCatalog upgradeCatalog = getUpgradeCatalog(dbAccessor);
+    Assert.assertEquals("2.1.1", upgradeCatalog.getSourceVersion());
+  }
+
+  @Test
+  public void testGetTargetVersion() throws Exception {
+    final DBAccessor dbAccessor = createNiceMock(DBAccessor.class);
+    UpgradeCatalog upgradeCatalog = getUpgradeCatalog(dbAccessor);
+
+    Assert.assertEquals("2.1.2", upgradeCatalog.getTargetVersion());
+  }
+
+}
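
For reference, testExecuteDMLUpdates above relies on EasyMock partial mocks: only
the two update steps are mocked while executeDMLUpdates keeps its real body. A
minimal, self-contained sketch of that pattern, assuming EasyMock 3.x on the
classpath; Steps, stepA and stepB are hypothetical stand-ins for the catalog and
its update methods:

import static org.easymock.EasyMock.createMockBuilder;
import static org.easymock.EasyMock.expectLastCall;
import static org.easymock.EasyMock.replay;
import static org.easymock.EasyMock.verify;

public class PartialMockDemo {
  // Hypothetical stand-in for an upgrade catalog: execute() delegates to two steps.
  public static class Steps {
    public void execute() { stepA(); stepB(); }
    protected void stepA() { throw new AssertionError("real stepA must not run"); }
    protected void stepB() { throw new AssertionError("real stepB must not run"); }
  }

  public static void main(String[] args) {
    // Mock only the two steps; execute() keeps its real implementation.
    Steps partial = createMockBuilder(Steps.class)
        .addMockedMethod("stepA")
        .addMockedMethod("stepB")
        .createMock();
    partial.stepA();
    expectLastCall().once();
    partial.stepB();
    expectLastCall().once();
    replay(partial);

    partial.execute();   // the real method drives the two mocked steps

    verify(partial);     // passes: each step was invoked exactly once
    System.out.println("partial mock verified");
  }
}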