Posted to commits@accumulo.apache.org by ec...@apache.org on 2013/11/26 16:47:30 UTC

[26/38] git commit: ACCUMULO-873 removed hdfs info from accumulo monitor page

ACCUMULO-873 removed hdfs info from accumulo monitor page

git-svn-id: https://svn.apache.org/repos/asf/accumulo/trunk@1427887 13f79535-47bb-0310-9956-ffa450edef68
(cherry picked from commit e05ca37e5f89b620c990723b2a41396415fdb8b9)

Reason: Polish
Author: Billie Rinaldi <bi...@apache.org>
Ref: ACCUMULO-1792

Signed-off-by: Eric Newton <er...@gmail.com>


Project: http://git-wip-us.apache.org/repos/asf/accumulo/repo
Commit: http://git-wip-us.apache.org/repos/asf/accumulo/commit/5d22af49
Tree: http://git-wip-us.apache.org/repos/asf/accumulo/tree/5d22af49
Diff: http://git-wip-us.apache.org/repos/asf/accumulo/diff/5d22af49

Branch: refs/heads/1.5.1-SNAPSHOT
Commit: 5d22af4903f49682db98f39d46509f089aeebe86
Parents: 4c16d8b
Author: Billie Rinaldi <bi...@apache.org>
Authored: Wed Jan 2 18:07:53 2013 +0000
Committer: Eric Newton <er...@gmail.com>
Committed: Mon Nov 25 16:06:42 2013 -0500

----------------------------------------------------------------------
 .../server/monitor/servlets/DefaultServlet.java | 40 --------------------
 1 file changed, 40 deletions(-)
----------------------------------------------------------------------
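For reference, the removed doHdfsTable() built its NameNode summary from HDFS-internal classes (DistributedFileSystem, DatanodeInfo, FSConstants), which is why those imports disappear in the diff below. A minimal sketch of reading comparable capacity figures through the generic FileSystem API follows; it is not part of this commit, the class name HdfsSummary is hypothetical, and it only assumes a Hadoop configuration (core-site.xml) on the classpath.

  import java.io.IOException;

  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.fs.FileSystem;
  import org.apache.hadoop.fs.FsStatus;

  // Sketch only: reports filesystem capacity/usage without touching
  // HDFS-internal classes such as DistributedFileSystem.
  public class HdfsSummary {
    public static void main(String[] args) throws IOException {
      Configuration conf = new Configuration();
      FileSystem fs = FileSystem.get(conf);   // default filesystem from core-site.xml
      FsStatus status = fs.getStatus();       // capacity, used, remaining (bytes)
      long capacity = status.getCapacity();
      long used = status.getUsed();
      System.out.printf("Capacity:  %d bytes%n", capacity);
      System.out.printf("Used:      %d bytes (%.1f%%)%n", used, used * 100.0 / capacity);
      System.out.printf("Remaining: %d bytes%n", status.getRemaining());
    }
  }

Because this stays on the public FileSystem contract, the same code works whether the default filesystem is HDFS or another implementation, whereas the removed servlet code was tied to HDFS-specific APIs.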


http://git-wip-us.apache.org/repos/asf/accumulo/blob/5d22af49/src/server/src/main/java/org/apache/accumulo/server/monitor/servlets/DefaultServlet.java
----------------------------------------------------------------------
diff --git a/src/server/src/main/java/org/apache/accumulo/server/monitor/servlets/DefaultServlet.java b/src/server/src/main/java/org/apache/accumulo/server/monitor/servlets/DefaultServlet.java
index 9591d4a..5603f69 100644
--- a/src/server/src/main/java/org/apache/accumulo/server/monitor/servlets/DefaultServlet.java
+++ b/src/server/src/main/java/org/apache/accumulo/server/monitor/servlets/DefaultServlet.java
@@ -50,10 +50,6 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.ContentSummary;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hdfs.DistributedFileSystem;
-import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
-import org.apache.hadoop.hdfs.protocol.FSConstants;
-import org.apache.hadoop.ipc.RemoteException;
 
 public class DefaultServlet extends BasicServlet {
   
@@ -213,10 +209,6 @@ public class DefaultServlet extends BasicServlet {
     sb.append("</td>\n");
     
     sb.append("<td class='noborder'>\n");
-    doHdfsTable(sb);
-    sb.append("</td>\n");
-    
-    sb.append("<td class='noborder'>\n");
     doZooKeeperTable(sb);
     sb.append("</td>\n");
     
@@ -296,38 +288,6 @@ public class DefaultServlet extends BasicServlet {
     sb.append("</table>\n");
   }
   
-  private void doHdfsTable(StringBuilder sb) throws IOException {
-    // HDFS
-    Configuration conf = CachedConfiguration.getInstance();
-    DistributedFileSystem fs = (DistributedFileSystem) FileSystem.get(conf);
-    String httpAddress = conf.get("dfs.http.address");
-    String port = httpAddress.split(":")[1];
-    String href = "http://" + fs.getUri().getHost() + ":" + port;
-    String liveUrl = href + "/dfsnodelist.jsp?whatNodes=LIVE";
-    String deadUrl = href + "/dfsnodelist.jsp?whatNodes=DEAD";
-    sb.append("<table>\n");
-    sb.append("<tr><th colspan='2'><a href='" + href + "'>NameNode</a></th></tr>\n");
-    try {
-      boolean highlight = false;
-      tableRow(sb, (highlight = !highlight), "Unreplicated&nbsp;Capacity", bytes(fs.getRawCapacity()));
-      tableRow(sb, (highlight = !highlight), "%&nbsp;Used", NumberType.commas(fs.getRawUsed() * 100. / fs.getRawCapacity(), 0, 90, 0, 100) + "%");
-      tableRow(sb, (highlight = !highlight), "Corrupt&nbsp;Blocks", NumberType.commas(fs.getCorruptBlocksCount(), 0, 0));
-      DatanodeInfo[] liveNodes = fs.getClient().datanodeReport(FSConstants.DatanodeReportType.LIVE);
-      DatanodeInfo[] deadNodes = fs.getClient().datanodeReport(FSConstants.DatanodeReportType.DEAD);
-      tableRow(sb, (highlight = !highlight), "<a href='" + liveUrl + "'>Live&nbsp;Data&nbsp;Nodes</a>", NumberType.commas(liveNodes.length));
-      tableRow(sb, (highlight = !highlight), "<a href='" + deadUrl + "'>Dead&nbsp;Data&nbsp;Nodes</a>", NumberType.commas(deadNodes.length));
-      long count = 0;
-      for (DatanodeInfo stat : liveNodes)
-        count += stat.getXceiverCount();
-      tableRow(sb, (highlight = !highlight), "Xceivers", NumberType.commas(count));
-    } catch (RemoteException ex) {
-      sb.append("<tr><td colspan='2'>Permission&nbsp;Denied</td></tr>\n");
-    } catch (Exception ex) {
-      sb.append("<tr><td colspan='2'><span class='error'>Down</span></td></tr>\n");
-    }
-    sb.append("</table>\n");
-  }
-  
   private void doZooKeeperTable(StringBuilder sb) throws IOException {
     // Zookeepers
     sb.append("<table>\n");