Posted to common-commits@hadoop.apache.org by vi...@apache.org on 2016/06/28 11:22:56 UTC

[2/2] hadoop git commit: HDFS-10440. Improve DataNode web UI (Contributed by Weiwei Yang)

HDFS-10440. Improve DataNode web UI (Contributed by Weiwei Yang)

(cherry picked from commit 2a0082c51da7cbe2770eddb5f72cd7f8d72fa5f6)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/1e347631
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/1e347631
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/1e347631

Branch: refs/heads/branch-2
Commit: 1e347631817d882353bfb91d68f109cb8232e8c4
Parents: 0a1a2ce
Author: Vinayakumar B <vi...@apache.org>
Authored: Tue Jun 28 16:49:39 2016 +0530
Committer: Vinayakumar B <vi...@apache.org>
Committed: Tue Jun 28 16:51:00 2016 +0530

----------------------------------------------------------------------
 .../hdfs/server/datanode/BPServiceActor.java    |  46 +++++++
 .../hadoop/hdfs/server/datanode/DataNode.java   |  26 ++++
 .../hdfs/server/datanode/DataNodeMXBean.java    |  20 ++-
 .../src/main/webapps/datanode/datanode.html     | 129 +++++++++++++++++++
 .../hadoop-hdfs/src/main/webapps/datanode/dn.js |  70 ++++++++++
 .../src/main/webapps/datanode/index.html        |  48 +------
 .../server/datanode/TestDataNodeMXBean.java     |   4 +
 7 files changed, 297 insertions(+), 46 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/1e347631/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/BPServiceActor.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/BPServiceActor.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/BPServiceActor.java
index 99874dd..70004e0 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/BPServiceActor.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/BPServiceActor.java
@@ -26,6 +26,7 @@ import java.net.InetSocketAddress;
 import java.net.SocketTimeoutException;
 import java.util.ArrayList;
 import java.util.Collection;
+import java.util.HashMap;
 import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
@@ -58,6 +59,7 @@ import org.apache.hadoop.hdfs.server.protocol.StorageReport;
 import org.apache.hadoop.hdfs.server.protocol.VolumeFailureSummary;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.ipc.RemoteException;
+import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.util.Time;
 import org.apache.hadoop.util.VersionInfo;
 import org.apache.hadoop.util.VersionUtil;
@@ -138,6 +140,10 @@ class BPServiceActor implements Runnable {
         || runningState == BPServiceActor.RunningState.CONNECTING;
   }
 
+  String getRunningState() {
+    return runningState.toString();
+  }
+
   @Override
   public String toString() {
     return bpos.toString() + " service to " + nnAddr;
@@ -147,6 +153,22 @@ class BPServiceActor implements Runnable {
     return nnAddr;
   }
 
+  private String getNameNodeAddress() {
+    return NetUtils.getHostPortString(getNNSocketAddress());
+  }
+
+  Map<String, String> getActorInfoMap() {
+    final Map<String, String> info = new HashMap<String, String>();
+    info.put("NamenodeAddress", getNameNodeAddress());
+    info.put("BlockPoolID", bpos.getBlockPoolId());
+    info.put("ActorState", getRunningState());
+    info.put("LastHeartbeat",
+        String.valueOf(getScheduler().getLastHearbeatTime()));
+    info.put("LastBlockReport",
+        String.valueOf(getScheduler().getLastBlockReportTime()));
+    return info;
+  }
+
   private final CountDownLatch initialRegistrationComplete;
   private final LifelineSender lifelineSender;
 
@@ -379,6 +401,7 @@ class BPServiceActor implements Runnable {
                   (nCmds + " commands: " + Joiner.on("; ").join(cmds)))) +
           ".");
     }
+    scheduler.updateLastBlockReportTime(monotonicNow());
     scheduler.scheduleNextBlockReport();
     return cmds.size() == 0 ? null : cmds;
   }
@@ -425,6 +448,7 @@ class BPServiceActor implements Runnable {
                 " storage reports from service actor: " + this);
     }
     
+    scheduler.updateLastHeartbeatTime(monotonicNow());
     VolumeFailureSummary volumeFailureSummary = dn.getFSDataset()
         .getVolumeFailureSummary();
     int numFailedVolumes = volumeFailureSummary != null ?
@@ -996,6 +1020,12 @@ class BPServiceActor implements Runnable {
     volatile long nextLifelineTime = monotonicNow();
 
     @VisibleForTesting
+    volatile long lastBlockReportTime = monotonicNow();
+
+    @VisibleForTesting
+    volatile long lastHeartbeatTime = monotonicNow();
+
+    @VisibleForTesting
     boolean resetBlockReportTime = true;
 
     private final AtomicBoolean forceFullBlockReport =
@@ -1033,6 +1063,22 @@ class BPServiceActor implements Runnable {
       return nextHeartbeatTime;
     }
 
+    void updateLastHeartbeatTime(long heartbeatTime) {
+      lastHeartbeatTime = heartbeatTime;
+    }
+
+    void updateLastBlockReportTime(long blockReportTime) {
+      lastBlockReportTime = blockReportTime;
+    }
+
+    long getLastHearbeatTime() {
+      return (monotonicNow() - lastHeartbeatTime)/1000;
+    }
+
+    long getLastBlockReportTime() {
+      return (monotonicNow() - lastBlockReportTime)/1000;
+    }
+
     long scheduleNextLifeline(long baseTime) {
       // Numerical overflow is possible here and is okay.
       nextLifelineTime = baseTime + lifelineIntervalMs;
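
The Scheduler additions above follow a simple pattern: record the monotonic clock value when a heartbeat or block report is sent, then report the elapsed time in whole seconds when the web UI asks for it. Below is a minimal standalone sketch of that pattern, not part of this patch; System.nanoTime() stands in for Hadoop's Time.monotonicNow(), and the class and method names are illustrative only.

// Sketch of the "record monotonic timestamp, report elapsed seconds" pattern
// used by the new Scheduler fields. Names here are hypothetical.
public class LastEventTracker {
  // volatile so a reader thread sees the latest value written by the actor thread
  private volatile long lastHeartbeatNanos = System.nanoTime();

  void markHeartbeat() {
    lastHeartbeatNanos = System.nanoTime();
  }

  /** Seconds elapsed since the last recorded heartbeat. */
  long secondsSinceLastHeartbeat() {
    return (System.nanoTime() - lastHeartbeatNanos) / 1_000_000_000L;
  }

  public static void main(String[] args) throws InterruptedException {
    LastEventTracker tracker = new LastEventTracker();
    tracker.markHeartbeat();
    Thread.sleep(2000);
    System.out.println(tracker.secondsSinceLastHeartbeat() + "s since last heartbeat");
  }
}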

http://git-wip-us.apache.org/repos/asf/hadoop/blob/1e347631/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java
index 1cc01cb..e7b72f7 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java
@@ -2845,6 +2845,13 @@ public class DataNode extends ReconfigurableBase
   }
 
   @Override // DataNodeMXBean
+  public String getDataPort(){
+    InetSocketAddress dataAddr = NetUtils.createSocketAddr(
+        this.getConf().get(DFS_DATANODE_ADDRESS_KEY));
+    return Integer.toString(dataAddr.getPort());
+  }
+
+  @Override // DataNodeMXBean
   public String getHttpPort(){
     return this.getConf().get("dfs.datanode.info.port");
   }
@@ -2884,6 +2891,25 @@ public class DataNode extends ReconfigurableBase
   }
 
   /**
+   * Returned information is a JSON representation of an array;
+   * each element of the array is a map that contains information
+   * about a block pool service actor.
+   */
+  @Override // DataNodeMXBean
+  public String getBPServiceActorInfo() {
+    final ArrayList<Map<String, String>> infoArray =
+        new ArrayList<Map<String, String>>();
+    for (BPOfferService bpos : blockPoolManager.getAllNamenodeThreads()) {
+      if (bpos != null) {
+        for (BPServiceActor actor : bpos.getBPServiceActors()) {
+          infoArray.add(actor.getActorInfoMap());
+        }
+      }
+    }
+    return JSON.toString(infoArray);
+  }
+
+  /**
    * Returned information is a JSON representation of a map with 
    * volume name as the key and value is a map of volume attribute 
    * keys to its values
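
The new getBPServiceActorInfo() serializes one map per block pool service actor into a JSON string, and the dn.js added further below fetches it through the datanode's /jmx servlet (query Hadoop:service=DataNode,name=DataNodeInfo). A hedged sketch of reading the same bean from outside the process with plain JDK classes; "localhost:50075" is only a placeholder for your datanode's HTTP address.

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;

// Fetches the DataNodeInfo MBean (including the new BPServiceActorInfo
// attribute) through the same JMX JSON servlet that dn.js queries.
public class FetchDataNodeInfo {
  public static void main(String[] args) throws Exception {
    URL url = new URL(
        "http://localhost:50075/jmx?qry=Hadoop:service=DataNode,name=DataNodeInfo");
    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
    BufferedReader in = new BufferedReader(
        new InputStreamReader(conn.getInputStream(), "UTF-8"));
    StringBuilder body = new StringBuilder();
    String line;
    while ((line = in.readLine()) != null) {
      body.append(line).append('\n');
    }
    in.close();
    // The response is a JSON document whose "beans" array holds the bean;
    // BPServiceActorInfo is itself a JSON string of per-actor maps with the
    // keys NamenodeAddress, BlockPoolID, ActorState, LastHeartbeat and
    // LastBlockReport, as built in getActorInfoMap() above.
    System.out.println(body);
  }
}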

http://git-wip-us.apache.org/repos/asf/hadoop/blob/1e347631/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNodeMXBean.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNodeMXBean.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNodeMXBean.java
index 92abd88..6b5428b 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNodeMXBean.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNodeMXBean.java
@@ -50,14 +50,28 @@ public interface DataNodeMXBean {
    * @return the http port
    */
   public String getHttpPort();
-  
+
+  /**
+   * Gets the data port.
+   *
+   * @return the data port
+   */
+  String getDataPort();
+
   /**
-   * Gets the namenode IP addresses
+   * Gets the namenode IP addresses.
    * 
    * @return the namenode IP addresses that the datanode is talking to
    */
   public String getNamenodeAddresses();
-  
+
+  /**
+   * Gets information of the block pool service actors.
+   *
+   * @return block pool service actors info
+   */
+  String getBPServiceActorInfo();
+
   /**
    * Gets the information of each volume on the Datanode. Please
    * see the implementation for the format of returned information.
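
The two new interface methods surface as the DataPort and BPServiceActorInfo attributes of the Hadoop:service=DataNode,name=DataNodeInfo bean. A small sketch, assuming it runs inside the DataNode JVM (as the unit test at the end of this patch does via MiniDFSCluster), of reading them from the platform MBeanServer; it is an illustration, not part of the patch.

import java.lang.management.ManagementFactory;
import javax.management.MBeanServer;
import javax.management.ObjectName;

// Reads the new MXBean attributes from the platform MBeanServer.
// Bean and attribute names follow this patch.
public class ReadDataNodeMXBean {
  public static void main(String[] args) throws Exception {
    MBeanServer mbs = ManagementFactory.getPlatformMBeanServer();
    ObjectName name = new ObjectName("Hadoop:service=DataNode,name=DataNodeInfo");
    String dataPort = (String) mbs.getAttribute(name, "DataPort");
    String actorInfo = (String) mbs.getAttribute(name, "BPServiceActorInfo");
    System.out.println("DataPort = " + dataPort);
    System.out.println("BPServiceActorInfo = " + actorInfo);
  }
}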

http://git-wip-us.apache.org/repos/asf/hadoop/blob/1e347631/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/datanode/datanode.html
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/datanode/datanode.html b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/datanode/datanode.html
new file mode 100644
index 0000000..22a2733
--- /dev/null
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/datanode/datanode.html
@@ -0,0 +1,129 @@
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN"
+    "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<html xmlns="http://www.w3.org/1999/xhtml">
+<head>
+<meta http-equiv="X-UA-Compatible" content="IE=9" />
+<link rel="stylesheet" type="text/css" href="/static/bootstrap-3.0.2/css/bootstrap.min.css" />
+<link rel="stylesheet" type="text/css" href="/static/hadoop.css" />
+<title>DataNode Information</title>
+</head>
+<body>
+
+<header class="navbar navbar-inverse bs-docs-nav" role="banner">
+<div class="container">
+  <div class="navbar-header">
+    <div class="navbar-brand">Hadoop</div>
+  </div>
+
+  <ul class="nav navbar-nav" id="ui-tabs">
+    <li><a href="#tab-overview">Overview</a></li>
+    <li class="dropdown">
+      <a href="#" class="dropdown-toggle" data-toggle="dropdown">Utilities <b class="caret"></b></a>
+      <ul class="dropdown-menu">
+        <li><a href="logs">Logs</a></li>
+      </ul>
+    </li>
+  </ul>
+</div>
+</header>
+
+<div class="container">
+
+<div id="alert-panel">
+  <div class="alert alert-danger">
+    <button type="button" class="close" onclick="$('#alert-panel').hide();">&times;</button>
+    <div class="alert-body" id="alert-panel-body"></div>
+  </div>
+</div>
+
+<div class="tab-content">
+  <div class="tab-pane" id="tab-overview"></div>
+</div>
+
+<div class="row">
+  <hr />
+  <div class="col-xs-2"><p>Hadoop, {release-year-token}.</p></div>
+</div>
+</div>
+
+<script type="text/x-dust-template" id="tmpl-dn">
+{#dn}
+<div class="page-header"><h1>DataNode on <small>{HostName}:{DataPort}</small></h1></div>
+<table class="table table-bordered table-striped">
+  <tr><th>Cluster ID:</th><td>{ClusterId}</td></tr>
+  <tr><th>Version:</th><td>{Version}</td></tr>
+</table>
+{/dn}
+
+<div class="page-header"><h1>Block Pools</h1></div>
+<table class="table">
+  <thead>
+    <tr>
+      <th>Namenode Address</th>
+      <th>Block Pool ID</th>
+      <th>Actor State</th>
+      <th>Last Heartbeat</th>
+      <th>Last Block Report</th>
+    </tr>
+  </thead>
+  {#dn.BPServiceActorInfo}
+    <tr>
+      <td>{NamenodeAddress}</td>
+      <td>{BlockPoolID}</td>
+      <td>{ActorState}</td>
+      <td>{LastHeartbeat}s</td>
+      <td>{#helper_relative_time value="{LastBlockReport}"/}</td>
+    </tr>
+  {/dn.BPServiceActorInfo}
+</table>
+
+<div class="page-header"><h1>Volume Information</h1></div>
+<table class="table">
+  <thead>
+    <tr>
+      <th>Directory</th>
+      <th>Capacity Used</th>
+      <th>Capacity Left</th>
+      <th>Capacity Reserved</th>
+      <th>Reserved Space for Replicas</th>
+      <th>Blocks</th>
+    </tr>
+  </thead>
+  {#dn.VolumeInfo}
+    <tr>
+      <td>{name}</td>
+      <td>{usedSpace|fmt_bytes}</td>
+      <td>{freeSpace|fmt_bytes}</td>
+      <td>{reservedSpace|fmt_bytes}</td>
+      <td>{reservedSpaceForReplicas|fmt_bytes}</td>
+      <td>{numBlocks}</td>
+    </tr>
+  {/dn.VolumeInfo}
+</script>
+
+<script type="text/javascript" src="/static/jquery-1.10.2.min.js"></script>
+<script type="text/javascript" src="/static/bootstrap-3.0.2/js/bootstrap.min.js"></script>
+<script type="text/javascript" src="/static/moment.min.js"></script>
+<script type="text/javascript" src="/static/dust-full-2.0.0.min.js"></script>
+<script type="text/javascript" src="/static/dust-helpers-1.1.1.min.js"></script>
+<script type="text/javascript" src="/static/dfs-dust.js"></script>
+<script type="text/javascript" src="dn.js"></script>
+
+</body>
+</html>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hadoop/blob/1e347631/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/datanode/dn.js
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/datanode/dn.js b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/datanode/dn.js
new file mode 100644
index 0000000..ea963cc
--- /dev/null
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/datanode/dn.js
@@ -0,0 +1,70 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+(function () {
+  "use strict";
+
+  var data = {};
+
+  dust.loadSource(dust.compile($('#tmpl-dn').html(), 'dn'));
+
+  function load() {
+    $.get('/jmx?qry=Hadoop:service=DataNode,name=DataNodeInfo', function(resp) {
+      data.dn = workaround(resp.beans[0]);
+      data.dn.HostName=window.location.hostname;
+      render();
+    }).fail(show_err_msg);
+  }
+
+  function workaround(dn) {
+    function node_map_to_array(nodes) {
+      var res = [];
+      for (var n in nodes) {
+        var p = nodes[n];
+        p.name = n;
+        res.push(p);
+      }
+      return res;
+    }
+
+    dn.VolumeInfo = node_map_to_array(JSON.parse(dn.VolumeInfo));
+    dn.BPServiceActorInfo = JSON.parse(dn.BPServiceActorInfo);
+
+    return dn;
+  }
+
+  function render() {
+    var base = dust.makeBase({
+      'helper_relative_time' : function (chunk, ctx, bodies, params) {
+        var value = dust.helpers.tap(params.value, chunk, ctx);
+        return chunk.write(moment().subtract(Number(value), 'seconds').fromNow(true));
+      }
+    });
+    dust.render('dn', base.push(data), function(err, out) {
+      $('#tab-overview').html(out);
+      $('#tab-overview').addClass('active');
+    });
+  }
+
+  function show_err_msg() {
+    $('#alert-panel-body').html("Failed to load datanode information");
+    $('#alert-panel').show();
+  }
+
+  load();
+
+})();

http://git-wip-us.apache.org/repos/asf/hadoop/blob/1e347631/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/datanode/index.html
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/datanode/index.html b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/datanode/index.html
index a88bc9b..fee51be 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/datanode/index.html
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/datanode/index.html
@@ -1,5 +1,3 @@
-<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN"
-    "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
 <!--
    Licensed to the Apache Software Foundation (ASF) under one or more
    contributor license agreements.  See the NOTICE file distributed with
@@ -16,47 +14,11 @@
    See the License for the specific language governing permissions and
    limitations under the License.
 -->
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN"
+        "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
 <html xmlns="http://www.w3.org/1999/xhtml">
 <head>
-<meta http-equiv="X-UA-Compatible" content="IE=9" />
-<link rel="stylesheet" type="text/css" href="/static/bootstrap-3.0.2/css/bootstrap.min.css" />
-<link rel="stylesheet" type="text/css" href="/static/hadoop.css" />
-<title>DataNode Information</title>
+  <meta http-equiv="REFRESH" content="0;url=datanode.html" />
+  <title>Hadoop Administration</title>
 </head>
-<body>
-
-<header class="navbar navbar-inverse bs-docs-nav" role="banner">
-<div class="container">
-  <div class="navbar-header">
-    <div class="navbar-brand">Hadoop</div>
-  </div>
-
-  <ul class="nav navbar-nav" id="ui-tabs">
-    <li><a>Overview</a></li>
-  </ul>
-</div>
-</header>
-
-<div class="container">
-
-<div class="tab-content">
-  <div class="tab-pane" id="tab-overview">
-    <div class="page-header"><h1>DataNode on <small><div id="authority" style="display: inline-block"></div></small></h1></div>
-  </div>
-</div>
-
-<div class="row">
-  <hr />
-  <div class="col-xs-2"><p>Hadoop, {release-year-token}.</p></div>
-</div>
-</div>
-
-<script type="text/javascript" src="/static/jquery-1.10.2.min.js">
-</script><script type="text/javascript" src="/static/bootstrap-3.0.2/js/bootstrap.min.js">
-</script>
-<script type="text/javascript">
-$('#authority').html(window.location.host);
-$('#tab-overview').addClass('active');
-</script>
-</body>
-</html>
+</html>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hadoop/blob/1e347631/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataNodeMXBean.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataNodeMXBean.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataNodeMXBean.java
index 9f5a471..24fe336 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataNodeMXBean.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataNodeMXBean.java
@@ -78,6 +78,10 @@ public class TestDataNodeMXBean {
       int xceiverCount = (Integer)mbs.getAttribute(mxbeanName,
           "XceiverCount");
       Assert.assertEquals(datanode.getXceiverCount(), xceiverCount);
+
+      String bpActorInfo = (String)mbs.getAttribute(mxbeanName,
+          "BPServiceActorInfo");
+      Assert.assertEquals(datanode.getBPServiceActorInfo(), bpActorInfo);
     } finally {
       if (cluster != null) {cluster.shutdown();}
     }


---------------------------------------------------------------------
To unsubscribe, e-mail: common-commits-unsubscribe@hadoop.apache.org
For additional commands, e-mail: common-commits-help@hadoop.apache.org