Posted to commits@ambari.apache.org by av...@apache.org on 2017/09/13 02:54:17 UTC

[2/3] ambari git commit: AMBARI-21939 : Ambari HDFS Heatmaps are not showing data (Not Applicable) for bytes read, bytes written & DataNode Process Network I/O Utilization. (avijayan)

AMBARI-21939 : Ambari HDFS Heatmaps are not showing data (Not Applicable) for bytes read, bytes written & DataNode Process Network I/O Utilization. (avijayan)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/a49121b9
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/a49121b9
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/a49121b9

Branch: refs/heads/trunk
Commit: a49121b9af276f45ef26e0b320de48d6ef6e7563
Parents: 7fa7a6c
Author: Aravindan Vijayan <av...@hortonworks.com>
Authored: Tue Sep 12 17:07:14 2017 -0700
Committer: Aravindan Vijayan <av...@hortonworks.com>
Committed: Tue Sep 12 17:07:14 2017 -0700

----------------------------------------------------------------------
 .../server/upgrade/UpgradeCatalog260.java       | 16 ++++
 .../PERF/1.0/services/FAKEHDFS/widgets.json     | 48 +++++------
 .../server/upgrade/UpgradeCatalog260Test.java   | 87 ++++++++++++++++++++
 3 files changed, 127 insertions(+), 24 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/a49121b9/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog260.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog260.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog260.java
index 2de85fc..b17569e 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog260.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog260.java
@@ -21,6 +21,7 @@ import static org.apache.ambari.server.view.ViewContextImpl.CORE_SITE;
 
 import java.sql.SQLException;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
@@ -401,6 +402,7 @@ public class UpgradeCatalog260 extends AbstractUpgradeCatalog {
     ensureZeppelinProxyUserConfigs();
     updateKerberosDescriptorArtifacts();
     updateAmsConfigs();
+    updateHDFSWidgetDefinition();
   }
 
   public int getCurrentVersionID() throws AmbariException, SQLException {
@@ -565,4 +567,18 @@ public class UpgradeCatalog260 extends AbstractUpgradeCatalog {
       }
     }
   }
+
+  protected void updateHDFSWidgetDefinition() throws AmbariException {
+    LOG.info("Updating HDFS widget definition.");
+
+    Map<String, List<String>> widgetMap = new HashMap<>();
+    Map<String, String> sectionLayoutMap = new HashMap<>();
+
+    List<String> hdfsHeatmapWidgets = new ArrayList<>(Arrays.asList("HDFS Bytes Read", "HDFS Bytes Written",
+      "DataNode Process Disk I/O Utilization", "DataNode Process Network I/O Utilization"));
+    widgetMap.put("HDFS_HEATMAPS", hdfsHeatmapWidgets);
+    sectionLayoutMap.put("HDFS_HEATMAPS", "default_hdfs_heatmap");
+
+    updateWidgetDefinitionsForService("HDFS", widgetMap, sectionLayoutMap);
+  }
 }
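
For readers unfamiliar with the widget descriptor format, the standalone sketch below (an illustration only, not part of this commit and not Ambari's AbstractUpgradeCatalog helper; the class and field names are made up for the example) parses a descriptor of the same shape as the stack widgets.json with Gson and prints the section -> layout and section -> widget-name mappings that updateHDFSWidgetDefinition() above builds and hands to updateWidgetDefinitionsForService().

    import java.util.List;

    import com.google.gson.Gson;

    // Standalone sketch (not Ambari code): reads a widgets descriptor of the shape used by
    // widgets.json / the widgetStr fixture in the test below and prints, per section, the
    // layout name and widget names -- the same mappings the upgrade step operates on.
    public class WidgetDescriptorSketch {
      // Minimal Gson binding classes for the parts of the descriptor used here.
      static class Descriptor { List<Layout> layouts; }
      static class Layout { String layout_name; String section_name; List<Widget> widgetLayoutInfo; }
      static class Widget { String widget_name; }

      public static void main(String[] args) {
        String json = "{ \"layouts\": [ { \"layout_name\": \"default_hdfs_heatmap\","
            + " \"section_name\": \"HDFS_HEATMAPS\","
            + " \"widgetLayoutInfo\": [ { \"widget_name\": \"HDFS Bytes Read\" },"
            + " { \"widget_name\": \"HDFS Bytes Written\" } ] } ] }";

        Descriptor descriptor = new Gson().fromJson(json, Descriptor.class);
        for (Layout layout : descriptor.layouts) {
          // e.g. "HDFS_HEATMAPS -> default_hdfs_heatmap"
          System.out.println(layout.section_name + " -> " + layout.layout_name);
          for (Widget widget : layout.widgetLayoutInfo) {
            System.out.println("  widget: " + widget.widget_name);
          }
        }
      }
    }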

http://git-wip-us.apache.org/repos/asf/ambari/blob/a49121b9/ambari-server/src/main/resources/stacks/PERF/1.0/services/FAKEHDFS/widgets.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/PERF/1.0/services/FAKEHDFS/widgets.json b/ambari-server/src/main/resources/stacks/PERF/1.0/services/FAKEHDFS/widgets.json
index 7a793f8..205a364 100644
--- a/ambari-server/src/main/resources/stacks/PERF/1.0/services/FAKEHDFS/widgets.json
+++ b/ambari-server/src/main/resources/stacks/PERF/1.0/services/FAKEHDFS/widgets.json
@@ -416,8 +416,8 @@
           "is_visible": true,
           "metrics": [
             {
-              "name": "dfs.datanode.BytesRead._rate",
-              "metric_path": "metrics/dfs/datanode/bytes_read._rate",
+              "name": "dfs.datanode.BytesRead",
+              "metric_path": "metrics/dfs/datanode/bytes_read",
               "service_name": "FAKEHDFS",
               "component_name": "FAKEDATANODE"
             }
@@ -425,7 +425,7 @@
           "values": [
             {
               "name": "FAKEHDFS Bytes Read",
-              "value": "${dfs.datanode.BytesRead._rate}"
+              "value": "${dfs.datanode.BytesRead}"
             }
           ],
           "properties": {
@@ -440,8 +440,8 @@
           "is_visible": false,
           "metrics": [
             {
-              "name": "dfs.datanode.BytesWritten._rate",
-              "metric_path": "metrics/dfs/datanode/bytes_written._rate",
+              "name": "dfs.datanode.BytesWritten",
+              "metric_path": "metrics/dfs/datanode/bytes_written",
               "service_name": "FAKEHDFS",
               "component_name": "FAKEDATANODE"
             }
@@ -449,7 +449,7 @@
           "values": [
             {
               "name": "FAKEHDFS Bytes Written",
-              "value": "${dfs.datanode.BytesWritten._rate}"
+              "value": "${dfs.datanode.BytesWritten}"
             }
           ],
           "properties": {
@@ -537,26 +537,26 @@
           "is_visible": false,
           "metrics": [
             {
-              "name": "dfs.datanode.BytesRead._rate",
-              "metric_path": "metrics/dfs/datanode/bytes_read._rate",
+              "name": "dfs.datanode.BytesRead",
+              "metric_path": "metrics/dfs/datanode/bytes_read",
               "service_name": "FAKEHDFS",
               "component_name": "FAKEDATANODE"
             },
             {
-              "name": "dfs.datanode.BytesWritten._rate",
-              "metric_path": "metrics/dfs/datanode/bytes_written._rate",
+              "name": "dfs.datanode.BytesWritten",
+              "metric_path": "metrics/dfs/datanode/bytes_written",
               "service_name": "FAKEHDFS",
               "component_name": "FAKEDATANODE"
             },
             {
-              "name": "dfs.datanode.TotalReadTime._rate",
-              "metric_path": "metrics/dfs/datanode/TotalReadTime._rate",
+              "name": "dfs.datanode.TotalReadTime",
+              "metric_path": "metrics/dfs/datanode/TotalReadTime",
               "service_name": "FAKEHDFS",
               "component_name": "FAKEDATANODE"
             },
             {
-              "name": "dfs.datanode.TotalWriteTime._rate",
-              "metric_path": "metrics/dfs/datanode/TotalWriteTime._rate",
+              "name": "dfs.datanode.TotalWriteTime",
+              "metric_path": "metrics/dfs/datanode/TotalWriteTime",
               "service_name": "FAKEHDFS",
               "component_name": "FAKEDATANODE"
             }
@@ -564,7 +564,7 @@
           "values": [
             {
               "name": "FAKEDataNode Process Disk I/O Utilization",
-              "value": "${((dfs.datanode.BytesRead._rate/dfs.datanode.TotalReadTime._rate)+(dfs.datanode.BytesWritten._rate/dfs.datanode.TotalWriteTime._rate))*50}"
+              "value": "${((dfs.datanode.BytesRead/dfs.datanode.TotalReadTime)+(dfs.datanode.BytesWritten/dfs.datanode.TotalWriteTime))*50}"
             }
           ],
           "properties": {
@@ -579,26 +579,26 @@
           "is_visible": false,
           "metrics": [
             {
-              "name": "dfs.datanode.RemoteBytesRead._rate",
-              "metric_path": "metrics/dfs/datanode/RemoteBytesRead._rate",
+              "name": "dfs.datanode.RemoteBytesRead",
+              "metric_path": "metrics/dfs/datanode/RemoteBytesRead",
               "service_name": "FAKEHDFS",
               "component_name": "FAKEDATANODE"
             },
             {
-              "name": "dfs.datanode.ReadsFromRemoteClient._rate",
-              "metric_path": "metrics/dfs/datanode/reads_from_remote_client._rate",
+              "name": "dfs.datanode.ReadsFromRemoteClient",
+              "metric_path": "metrics/dfs/datanode/reads_from_remote_client",
               "service_name": "FAKEHDFS",
               "component_name": "FAKEDATANODE"
             },
             {
-              "name": "dfs.datanode.RemoteBytesWritten._rate",
-              "metric_path": "metrics/dfs/datanode/RemoteBytesWritten._rate",
+              "name": "dfs.datanode.RemoteBytesWritten",
+              "metric_path": "metrics/dfs/datanode/RemoteBytesWritten",
               "service_name": "FAKEHDFS",
               "component_name": "FAKEDATANODE"
             },
             {
-              "name": "dfs.datanode.WritesFromRemoteClient._rate",
-              "metric_path": "metrics/dfs/datanode/writes_from_remote_client._rate",
+              "name": "dfs.datanode.WritesFromRemoteClient",
+              "metric_path": "metrics/dfs/datanode/writes_from_remote_client",
               "service_name": "FAKEHDFS",
               "component_name": "FAKEDATANODE"
             }
@@ -606,7 +606,7 @@
           "values": [
             {
               "name": "FAKEDataNode Process Network I/O Utilization",
-              "value": "${((dfs.datanode.RemoteBytesRead._rate/dfs.datanode.ReadsFromRemoteClient._rate)+(dfs.datanode.RemoteBytesWritten._rate/dfs.datanode.WritesFromRemoteClient._rate))*50}"
+              "value": "${((dfs.datanode.RemoteBytesRead/dfs.datanode.ReadsFromRemoteClient)+(dfs.datanode.RemoteBytesWritten/dfs.datanode.WritesFromRemoteClient))*50}"
             }
           ],
           "properties": {

http://git-wip-us.apache.org/repos/asf/ambari/blob/a49121b9/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog260Test.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog260Test.java b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog260Test.java
index b70f37b..d9c7957 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog260Test.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog260Test.java
@@ -50,8 +50,10 @@ import java.util.Set;
 import javax.persistence.EntityManager;
 
 import com.google.common.collect.Maps;
+import com.google.inject.AbstractModule;
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.actionmanager.ActionManager;
+import org.apache.ambari.server.api.services.AmbariMetaInfo;
 import org.apache.ambari.server.configuration.Configuration;
 import org.apache.ambari.server.controller.AmbariManagementController;
 import org.apache.ambari.server.controller.AmbariManagementControllerImpl;
@@ -60,17 +62,23 @@ import org.apache.ambari.server.controller.MaintenanceStateHelper;
 import org.apache.ambari.server.orm.DBAccessor;
 import org.apache.ambari.server.orm.DBAccessor.DBColumnInfo;
 import org.apache.ambari.server.orm.dao.ArtifactDAO;
+import org.apache.ambari.server.orm.dao.WidgetDAO;
 import org.apache.ambari.server.orm.entities.ArtifactEntity;
+import org.apache.ambari.server.orm.entities.WidgetEntity;
+import org.apache.ambari.server.stack.StackManagerFactory;
 import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.Clusters;
 import org.apache.ambari.server.state.Config;
 import org.apache.ambari.server.state.Service;
+import org.apache.ambari.server.state.ServiceInfo;
 import org.apache.ambari.server.state.StackId;
+import org.apache.ambari.server.state.StackInfo;
 import org.apache.ambari.server.state.kerberos.KerberosComponentDescriptor;
 import org.apache.ambari.server.state.kerberos.KerberosDescriptor;
 import org.apache.ambari.server.state.kerberos.KerberosDescriptorFactory;
 import org.apache.ambari.server.state.kerberos.KerberosServiceDescriptor;
 import org.apache.ambari.server.state.stack.OsFamily;
+import org.apache.commons.io.FileUtils;
 import org.easymock.Capture;
 import org.easymock.EasyMock;
 import org.easymock.EasyMockRunner;
@@ -80,7 +88,9 @@ import org.easymock.MockType;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;
+import org.junit.Rule;
 import org.junit.Test;
+import org.junit.rules.TemporaryFolder;
 import org.junit.runner.RunWith;
 
 import com.google.gson.Gson;
@@ -142,6 +152,9 @@ public class UpgradeCatalog260Test {
   @Mock(type = MockType.NICE)
   private Injector injector;
 
+  @Rule
+  public TemporaryFolder temporaryFolder = new TemporaryFolder();
+
   @Before
   public void init() {
     reset(entityManagerProvider, injector);
@@ -737,4 +750,78 @@ public class UpgradeCatalog260Test {
     assertTrue(Maps.difference(newProperties, updatedProperties).areEqual());
   }
 
+  @Test
+  public void testHDFSWidgetUpdate() throws Exception {
+    final Clusters clusters = createNiceMock(Clusters.class);
+    final Cluster cluster = createNiceMock(Cluster.class);
+    final AmbariManagementController controller = createNiceMock(AmbariManagementController.class);
+    final Gson gson = new Gson();
+    final WidgetDAO widgetDAO = createNiceMock(WidgetDAO.class);
+    final AmbariMetaInfo metaInfo = createNiceMock(AmbariMetaInfo.class);
+    WidgetEntity widgetEntity = createNiceMock(WidgetEntity.class);
+    StackId stackId = new StackId("HDP", "2.0.0");
+    StackInfo stackInfo = createNiceMock(StackInfo.class);
+    ServiceInfo serviceInfo = createNiceMock(ServiceInfo.class);
+    Service service = createNiceMock(Service.class);
+
+    String widgetStr = "{\n" +
+      "  \"layouts\": [\n" +
+      "      {\n" +
+      "      \"layout_name\": \"default_hdfs_heatmap\",\n" +
+      "      \"display_name\": \"Standard HDFS HeatMaps\",\n" +
+      "      \"section_name\": \"HDFS_HEATMAPS\",\n" +
+      "      \"widgetLayoutInfo\": [\n" +
+      "        {\n" +
+      "          \"widget_name\": \"HDFS Bytes Read\",\n" +
+      "          \"metrics\": [],\n" +
+      "          \"values\": []\n" +
+      "        }\n" +
+      "      ]\n" +
+      "    }\n" +
+      "  ]\n" +
+      "}";
+
+    File dataDirectory = temporaryFolder.newFolder();
+    File file = new File(dataDirectory, "hdfs_widget.json");
+    FileUtils.writeStringToFile(file, widgetStr);
+
+    final Injector mockInjector = Guice.createInjector(new AbstractModule() {
+      @Override
+      protected void configure() {
+        bind(EntityManager.class).toInstance(createNiceMock(EntityManager.class));
+        bind(AmbariManagementController.class).toInstance(controller);
+        bind(Clusters.class).toInstance(clusters);
+        bind(DBAccessor.class).toInstance(createNiceMock(DBAccessor.class));
+        bind(OsFamily.class).toInstance(createNiceMock(OsFamily.class));
+        bind(Gson.class).toInstance(gson);
+        bind(WidgetDAO.class).toInstance(widgetDAO);
+        bind(StackManagerFactory.class).toInstance(createNiceMock(StackManagerFactory.class));
+        bind(AmbariMetaInfo.class).toInstance(metaInfo);
+      }
+    });
+    expect(controller.getClusters()).andReturn(clusters).anyTimes();
+    expect(clusters.getClusters()).andReturn(new HashMap<String, Cluster>() {{
+      put("normal", cluster);
+    }}).anyTimes();
+    expect(cluster.getServices()).andReturn(Collections.singletonMap("HDFS", service)).anyTimes();
+    expect(cluster.getClusterId()).andReturn(1L).anyTimes();
+    expect(service.getDesiredStackId()).andReturn(stackId).anyTimes();
+    expect(stackInfo.getService("HDFS")).andReturn(serviceInfo);
+    expect(cluster.getDesiredStackVersion()).andReturn(stackId).anyTimes();
+    expect(metaInfo.getStack("HDP", "2.0.0")).andReturn(stackInfo).anyTimes();
+    expect(serviceInfo.getWidgetsDescriptorFile()).andReturn(file).anyTimes();
+
+    expect(widgetDAO.findByName(1L, "HDFS Bytes Read", "ambari", "HDFS_HEATMAPS"))
+      .andReturn(Collections.singletonList(widgetEntity));
+    expect(widgetDAO.merge(widgetEntity)).andReturn(null);
+    expect(widgetEntity.getWidgetName()).andReturn("HDFS Bytes Read").anyTimes();
+
+    replay(clusters, cluster, controller, widgetDAO, metaInfo, widgetEntity, stackInfo, serviceInfo, service);
+
+    mockInjector.getInstance(UpgradeCatalog260.class).updateHDFSWidgetDefinition();
+
+    verify(clusters, cluster, controller, widgetDAO, widgetEntity, stackInfo, serviceInfo);
+  }
+
+
 }