You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@ambari.apache.org by ao...@apache.org on 2016/04/14 19:25:38 UTC

ambari git commit: AMBARI-15880. ATS heapsize cannot be changed in Ambari-2.2.1.1 or greater post Ambari Server upgrade, even though the bug should be fixed (aonishuk)

Repository: ambari
Updated Branches:
  refs/heads/trunk d63147ff0 -> 76f831cdf


AMBARI-15880. ATS heapsize cannot be changed in Ambari-2.2.1.1 or greater post Ambari Server upgrade, even though the bug should be fixed (aonishuk)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/76f831cd
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/76f831cd
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/76f831cd

Branch: refs/heads/trunk
Commit: 76f831cdf2c5d62518db6d6075bd8632b5350fc3
Parents: d63147f
Author: Andrew Onishuk <ao...@hortonworks.com>
Authored: Thu Apr 14 20:25:37 2016 +0300
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Thu Apr 14 20:25:37 2016 +0300

----------------------------------------------------------------------
 .../server/upgrade/UpgradeCatalog240.java       | 50 ++++++++++++++-
 .../server/upgrade/UpgradeCatalog240Test.java   | 66 ++++++++++++++++++++
 2 files changed, 114 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/76f831cd/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog240.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog240.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog240.java
index 2878b0e..ddaff84 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog240.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog240.java
@@ -89,6 +89,7 @@ public class UpgradeCatalog240 extends AbstractUpgradeCatalog {
   protected static final String STACK_TABLE = "stack";
   protected static final String CLUSTER_TABLE = "clusters";
   protected static final String CLUSTER_UPGRADE_ID_COLUMN = "upgrade_id";
+  protected static final String YARN_ENV_CONFIG = "yarn-env";
   public static final String DESIRED_VERSION_COLUMN_NAME = "desired_version";
   public static final String BLUEPRINT_SETTING_TABLE = "blueprint_setting";
   public static final String BLUEPRINT_NAME_COL = "blueprint_name";
@@ -195,6 +196,7 @@ public class UpgradeCatalog240 extends AbstractUpgradeCatalog {
     updateClusterEnv();
     updateHostRoleCommandTableDML();
     updateKerberosConfigs();
+    updateYarnEnv();
   }
 
   private void createSettingTable() throws SQLException {
@@ -816,10 +818,10 @@ public class UpgradeCatalog240 extends AbstractUpgradeCatalog {
    */
   protected void updateAlertCurrentTable() throws SQLException {
     dbAccessor.addColumn(ALERT_CURRENT_TABLE,
-        new DBColumnInfo(ALERT_CURRENT_OCCURRENCES_COLUMN, Long.class, null, 1, false));
+            new DBColumnInfo(ALERT_CURRENT_OCCURRENCES_COLUMN, Long.class, null, 1, false));
 
     dbAccessor.addColumn(ALERT_CURRENT_TABLE, new DBColumnInfo(ALERT_CURRENT_FIRMNESS_COLUMN,
-        String.class, 255, AlertFirmness.HARD.name(), false));
+            String.class, 255, AlertFirmness.HARD.name(), false));
   }
 
   protected void setRoleSortOrder() throws SQLException {
@@ -1161,6 +1163,50 @@ public class UpgradeCatalog240 extends AbstractUpgradeCatalog {
     }
   }
 
+
+  /**
+   * Updates {@code yarn-env} in the following ways:
+   * <ul>
+   * <li>Replaces export YARN_HISTORYSERVER_HEAPSIZE={{apptimelineserver_heapsize}} with export
+   * YARN_TIMELINESERVER_HEAPSIZE={{apptimelineserver_heapsize}}</li>
+   * </ul>
+   *
+   * @throws AmbariException if the configuration update fails
+   */
+  protected void updateYarnEnv() throws AmbariException {
+    AmbariManagementController ambariManagementController = injector.getInstance(
+            AmbariManagementController.class);
+
+    Clusters clusters = ambariManagementController.getClusters();
+
+    Map<String, Cluster> clusterMap = getCheckedClusterMap(clusters);
+    for (final Cluster cluster : clusterMap.values()) {
+      Config yarnEnvConfig = cluster.getDesiredConfigByType(YARN_ENV_CONFIG);
+      Map<String, String> yarnEnvProps = new HashMap<String, String>();
+      if (yarnEnvConfig != null) {
+        String content = yarnEnvConfig.getProperties().get("content");
+        // comment out the old (misnamed) property rather than deleting it
+        content = content.replaceAll("export YARN_HISTORYSERVER_HEAPSIZE=\\{\\{apptimelineserver_heapsize\\}\\}",
+                "# export YARN_HISTORYSERVER_HEAPSIZE=\\{\\{apptimelineserver_heapsize\\}\\}");
+        // add new correct property
+        content = content + "\n\n      # Specify the max Heapsize for the timeline server using a numerical value\n" +
+                "      # in the scale of MB. For example, to specify an jvm option of -Xmx1000m, set\n" +
+                "      # the value to 1024.\n" +
+                "      # This value will be overridden by an Xmx setting specified in either YARN_OPTS\n" +
+                "      # and/or YARN_TIMELINESERVER_OPTS.\n" +
+                "      # If not specified, the default value will be picked from either YARN_HEAPMAX\n" +
+                "      # or JAVA_HEAP_MAX with YARN_HEAPMAX as the preferred option of the two.\n" +
+                "      export YARN_TIMELINESERVER_HEAPSIZE={{apptimelineserver_heapsize}}";
+
+        yarnEnvProps.put("content", content);
+        updateConfigurationPropertiesForCluster(cluster, YARN_ENV_CONFIG, yarnEnvProps, true, true);
+      }
+
+    }
+
+  }
+
+
   /**
    * Updates the Kerberos-related configurations for the clusters managed by this Ambari
    * <p/>

http://git-wip-us.apache.org/repos/asf/ambari/blob/76f831cd/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog240Test.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog240Test.java b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog240Test.java
index 73bfa74..cfb57ab 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog240Test.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog240Test.java
@@ -371,6 +371,7 @@ public class UpgradeCatalog240Test {
     Method updateClusterEnv = UpgradeCatalog240.class.getDeclaredMethod("updateClusterEnv");
     Method updateHostRoleCommandTableDML = UpgradeCatalog240.class.getDeclaredMethod("updateHostRoleCommandTableDML");
     Method updateKerberosEnv = UpgradeCatalog240.class.getDeclaredMethod("updateKerberosConfigs");
+    Method updateYarnEnv = UpgradeCatalog240.class.getDeclaredMethod("updateYarnEnv");
 
     Capture<String> capturedStatements = newCapture(CaptureType.ALL);
 
@@ -387,6 +388,7 @@ public class UpgradeCatalog240Test {
             .addMockedMethod(updateClusterEnv)
             .addMockedMethod(updateHostRoleCommandTableDML)
             .addMockedMethod(updateKerberosEnv)
+            .addMockedMethod(updateYarnEnv)
             .createMock();
 
     Field field = AbstractUpgradeCatalog.class.getDeclaredField("dbAccessor");
@@ -401,6 +403,7 @@ public class UpgradeCatalog240Test {
     upgradeCatalog240.updateClusterEnv();
     upgradeCatalog240.updateHostRoleCommandTableDML();
     upgradeCatalog240.updateKerberosConfigs();
+    upgradeCatalog240.updateYarnEnv();
 
     replay(upgradeCatalog240, dbAccessor);
 
@@ -569,6 +572,69 @@ public class UpgradeCatalog240Test {
   }
 
   @Test
+  public void testYarnEnvUpdateConfigs() throws Exception{
+
+    Map<String, String> oldPropertiesYarnEnv = new HashMap<String, String>() {
+      {
+        put("content", "export YARN_HISTORYSERVER_HEAPSIZE={{apptimelineserver_heapsize}}");
+      }
+    };
+    Map<String, String> newPropertiesYarnEnv = new HashMap<String, String>() {
+      {
+        put("content", "# export YARN_HISTORYSERVER_HEAPSIZE={{apptimelineserver_heapsize}}" +
+                "\n\n      # Specify the max Heapsize for the timeline server using a numerical value\n" +
+                "      # in the scale of MB. For example, to specify an jvm option of -Xmx1000m, set\n" +
+                "      # the value to 1024.\n" +
+                "      # This value will be overridden by an Xmx setting specified in either YARN_OPTS\n" +
+                "      # and/or YARN_TIMELINESERVER_OPTS.\n" +
+                "      # If not specified, the default value will be picked from either YARN_HEAPMAX\n" +
+                "      # or JAVA_HEAP_MAX with YARN_HEAPMAX as the preferred option of the two.\n" +
+                "      export YARN_TIMELINESERVER_HEAPSIZE={{apptimelineserver_heapsize}}");
+      }
+    };
+    EasyMockSupport easyMockSupport = new EasyMockSupport();
+
+    Clusters clusters = easyMockSupport.createNiceMock(Clusters.class);
+    final Cluster cluster = easyMockSupport.createNiceMock(Cluster.class);
+    Config mockYarnEnv = easyMockSupport.createNiceMock(Config.class);
+
+    expect(clusters.getClusters()).andReturn(new HashMap<String, Cluster>() {{
+      put("normal", cluster);
+    }}).once();
+    expect(cluster.getDesiredConfigByType("yarn-env")).andReturn(mockYarnEnv).atLeastOnce();
+    expect(mockYarnEnv.getProperties()).andReturn(oldPropertiesYarnEnv).anyTimes();
+
+    Injector injector = easyMockSupport.createNiceMock(Injector.class);
+    expect(injector.getInstance(Gson.class)).andReturn(null).anyTimes();
+    expect(injector.getInstance(MaintenanceStateHelper.class)).andReturn(null).anyTimes();
+    expect(injector.getInstance(KerberosHelper.class)).andReturn(createNiceMock(KerberosHelper.class)).anyTimes();
+
+    replay(injector, clusters, mockYarnEnv, cluster);
+
+    AmbariManagementControllerImpl controller = createMockBuilder(AmbariManagementControllerImpl.class)
+            .addMockedMethod("createConfiguration")
+            .addMockedMethod("getClusters", new Class[] { })
+            .addMockedMethod("createConfig")
+            .withConstructor(createNiceMock(ActionManager.class), clusters, injector)
+            .createNiceMock();
+
+    Injector injector2 = easyMockSupport.createNiceMock(Injector.class);
+    Capture<Map> propertiesCapture = EasyMock.newCapture();
+
+    expect(injector2.getInstance(AmbariManagementController.class)).andReturn(controller).anyTimes();
+    expect(controller.getClusters()).andReturn(clusters).anyTimes();
+    expect(controller.createConfig(anyObject(Cluster.class), anyString(), capture(propertiesCapture), anyString(),
+            anyObject(Map.class))).andReturn(createNiceMock(Config.class)).once();
+
+    replay(controller, injector2);
+    new UpgradeCatalog240(injector2).updateYarnEnv();
+    easyMockSupport.verifyAll();
+
+    Map<String, String> updatedProperties = propertiesCapture.getValue();
+    assertTrue(Maps.difference(newPropertiesYarnEnv, updatedProperties).areEqual());
+  }
+
+  @Test
   public void testAmsHbaseEnvUpdateConfigs() throws Exception{
 
     Map<String, String> oldPropertiesAmsHbaseEnv = new HashMap<String, String>() {