Posted to common-commits@hadoop.apache.org by sz...@apache.org on 2009/05/20 00:37:21 UTC

svn commit: r776490 [1/2] - in /hadoop/core/trunk: ./ src/hdfs/org/apache/hadoop/hdfs/server/common/ src/hdfs/org/apache/hadoop/hdfs/server/datanode/ src/hdfs/org/apache/hadoop/hdfs/server/namenode/ src/webapps/datanode/ src/webapps/hdfs/ src/webapps/s...

Author: szetszwo
Date: Tue May 19 22:37:21 2009
New Revision: 776490

URL: http://svn.apache.org/viewvc?rev=776490&view=rev
Log:
HADOOP-5857. Move normal java methods from hdfs .jsp files to .java files.  (szetszwo)

Added:
    hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/common/JspHelper.java
      - copied, changed from r776489, hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/JspHelper.java
    hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/datanode/DatanodeJspHelper.java
    hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/NamenodeJspHelper.java
Removed:
    hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/JspHelper.java
Modified:
    hadoop/core/trunk/CHANGES.txt
    hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/DfsServlet.java
    hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/FileDataServlet.java
    hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/StreamFile.java
    hadoop/core/trunk/src/webapps/datanode/browseBlock.jsp
    hadoop/core/trunk/src/webapps/datanode/browseDirectory.jsp
    hadoop/core/trunk/src/webapps/datanode/tail.jsp
    hadoop/core/trunk/src/webapps/hdfs/dfshealth.jsp
    hadoop/core/trunk/src/webapps/hdfs/dfsnodelist.jsp
    hadoop/core/trunk/src/webapps/hdfs/nn_browsedfscontent.jsp
    hadoop/core/trunk/src/webapps/secondary/status.jsp
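
For readers skimming the archive without walking the full diff: the change takes Java method bodies that previously lived inside <%! ... %> declaration blocks of the HDFS web pages (visible in the removed browseBlock.jsp code further down) and moves them into ordinary, compilable classes such as JspHelper, DatanodeJspHelper and NamenodeJspHelper, leaving the .jsp files as thin delegators. The rewired .jsp pages themselves are in part [2/2] of this commit and are not shown here; presumably a trimmed page reduces to a one-line call such as <% DatanodeJspHelper.generateFileDetails(out, request); %>, matching the signatures in the added files below. A minimal, hypothetical sketch of the pattern follows (ExampleJspHelper and its method are illustrative only, and java.io.Writer stands in for the javax.servlet JspWriter/HttpServletRequest arguments the real helpers take):

    // Hypothetical sketch of the HADOOP-5857 refactoring pattern, not code from
    // this commit: page logic that used to sit in a <%! ... %> JSP block becomes
    // a plain static helper, and the .jsp keeps only a delegation call like
    //   <% ExampleJspHelper.generateHealthReport(out, "my-cluster"); %>
    import java.io.IOException;
    import java.io.Writer;

    public class ExampleJspHelper {
      private ExampleJspHelper() {}  // static helper class, no instances

      /** Render a small HTML fragment; the JSP just forwards its writer here. */
      public static void generateHealthReport(Writer out, String clusterName)
          throws IOException {
        out.write("<h3>Cluster: " + clusterName + "</h3>");
        out.write("<div id='dfstable'><table>");
        out.write("<tr><td id='col1'>Status:</td><td>OK</td></tr>");
        out.write("</table></div>");
      }
    }

Keeping the logic in .java files means it is compiled and unit-testable like any other class, instead of only being checked when the servlet container translates the JSP.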

Modified: hadoop/core/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/CHANGES.txt?rev=776490&r1=776489&r2=776490&view=diff
==============================================================================
--- hadoop/core/trunk/CHANGES.txt (original)
+++ hadoop/core/trunk/CHANGES.txt Tue May 19 22:37:21 2009
@@ -370,6 +370,9 @@
     HADOOP-5866. Move DeprecatedUTF8 from o.a.h.io to o.a.h.hdfs since it may
     not be used outside hdfs. (Raghu Angadi)
 
+    HADOOP-5857. Move normal java methods from hdfs .jsp files to .java files.
+    (szetszwo)
+
   OPTIMIZATIONS
 
     HADOOP-5595. NameNode does not need to run a replicator to choose a

Copied: hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/common/JspHelper.java (from r776489, hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/JspHelper.java)
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/common/JspHelper.java?p2=hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/common/JspHelper.java&p1=hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/JspHelper.java&r1=776489&r2=776490&rev=776490&view=diff
==============================================================================
--- hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/JspHelper.java (original)
+++ hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/common/JspHelper.java Tue May 19 22:37:21 2009
@@ -16,12 +16,11 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hdfs.server.namenode;
+package org.apache.hadoop.hdfs.server.common;
 
 import java.io.IOException;
 import java.io.UnsupportedEncodingException;
 import java.net.InetSocketAddress;
-import java.net.MalformedURLException;
 import java.net.Socket;
 import java.net.URL;
 import java.net.URLEncoder;
@@ -39,13 +38,10 @@
 import org.apache.hadoop.hdfs.DFSClient;
 import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
 import org.apache.hadoop.hdfs.protocol.LocatedBlock;
-import org.apache.hadoop.hdfs.protocol.FSConstants.UpgradeAction;
-import org.apache.hadoop.hdfs.server.common.HdfsConstants;
-import org.apache.hadoop.hdfs.server.common.UpgradeStatusReport;
+import org.apache.hadoop.hdfs.server.namenode.DatanodeDescriptor;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.AccessToken;
 import org.apache.hadoop.security.UnixUserGroupInformation;
-import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.VersionInfo;
 
 public class JspHelper {
@@ -178,59 +174,6 @@
     out.print("</tbody></table>");
   }
 
-  public static String getSafeModeText(FSNamesystem fsn) {
-    if (!fsn.isInSafeMode())
-      return "";
-    return "Safe mode is ON. <em>" + fsn.getSafeModeTip() + "</em><br>";
-  }
-
-  public static String getWarningText(FSNamesystem fsn) {
-    // Ideally this should be displayed in RED
-    long missingBlocks = fsn.getMissingBlocksCount();
-    if (missingBlocks > 0) {
-      return "<br> WARNING :" + 
-             " There are about " + missingBlocks +
-             " missing blocks. Please check the log or run fsck. <br><br>";
-    }
-    return "";
-  }
-  
-  public static String getInodeLimitText(FSNamesystem fsn) {
-    long inodes = fsn.dir.totalInodes();
-    long blocks = fsn.getBlocksTotal();
-    long maxobjects = fsn.getMaxObjects();
-    long totalMemory = Runtime.getRuntime().totalMemory();   
-    long maxMemory = Runtime.getRuntime().maxMemory();   
-
-    long used = (totalMemory * 100)/maxMemory;
- 
-    String str = inodes + " files and directories, " +
-                 blocks + " blocks = " +
-                 (inodes + blocks) + " total";
-    if (maxobjects != 0) {
-      long pct = ((inodes + blocks) * 100)/maxobjects;
-      str += " / " + maxobjects + " (" + pct + "%)";
-    }
-    str += ".  Heap Size is " + StringUtils.byteDesc(totalMemory) + " / " + 
-           StringUtils.byteDesc(maxMemory) + 
-           " (" + used + "%) <br>";
-    return str;
-  }
-
-  public static String getUpgradeStatusText(FSNamesystem fsn) {
-    String statusText = "";
-    try {
-      UpgradeStatusReport status = 
-        fsn.distributedUpgradeProgress(UpgradeAction.GET_STATUS);
-      statusText = (status == null ? 
-          "There are no upgrades in progress." :
-            status.getStatusText(false));
-    } catch(IOException e) {
-      statusText = "Upgrade status unknown.";
-    }
-    return statusText;
-  }
-
   public static void sortNodeList(ArrayList<DatanodeDescriptor> nodes,
                            String field, String order) {
         
@@ -378,16 +321,6 @@
   }
 
   /** Return a table containing version information. */
-  public static String getVersionTable(FSNamesystem fsn) {
-    return "<div id='dfstable'><table>"       
-        + "\n  <tr><td id='col1'>Started:</td><td>" + fsn.getStartTime() + "</td></tr>\n"
-        + "\n  <tr><td id='col1'>Version:</td><td>" + VersionInfo.getVersion() + ", " + VersionInfo.getRevision()
-        + "\n  <tr><td id='col1'>Compiled:</td><td>" + VersionInfo.getDate() + " by " + VersionInfo.getUser() + " from " + VersionInfo.getBranch()
-        + "\n  <tr><td id='col1'>Upgrades:</td><td>" + getUpgradeStatusText(fsn)
-        + "\n</table></div>";
-  }
-
-  /** Return a table containing version information. */
   public static String getVersionTable() {
     return "<div id='dfstable'><table>"       
         + "\n  <tr><td id='col1'>Version:</td><td>" + VersionInfo.getVersion() + ", " + VersionInfo.getRevision()

Added: hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/datanode/DatanodeJspHelper.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/datanode/DatanodeJspHelper.java?rev=776490&view=auto
==============================================================================
--- hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/datanode/DatanodeJspHelper.java (added)
+++ hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/datanode/DatanodeJspHelper.java Tue May 19 22:37:21 2009
@@ -0,0 +1,580 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdfs.server.datanode;
+
+import java.io.File;
+import java.io.IOException;
+import java.net.InetAddress;
+import java.net.InetSocketAddress;
+import java.net.URLEncoder;
+import java.util.Date;
+import java.util.List;
+
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+import javax.servlet.jsp.JspWriter;
+
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FsShell;
+import org.apache.hadoop.hdfs.DFSClient;
+import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
+import org.apache.hadoop.hdfs.protocol.LocatedBlock;
+import org.apache.hadoop.hdfs.server.common.JspHelper;
+import org.apache.hadoop.net.NetUtils;
+import org.apache.hadoop.security.AccessToken;
+import org.apache.hadoop.security.AccessTokenHandler;
+import org.apache.hadoop.util.StringUtils;
+
+class DatanodeJspHelper {
+  private static final DataNode datanode = DataNode.getDataNode();
+
+  static void generateDirectoryStructure(JspWriter out, HttpServletRequest req,
+      HttpServletResponse resp) throws IOException {
+    final String dir = JspHelper.validatePath(req.getParameter("dir"));
+    if (dir == null) {
+      out.print("Invalid input");
+      return;
+    }
+
+    String namenodeInfoPortStr = req.getParameter("namenodeInfoPort");
+    int namenodeInfoPort = -1;
+    if (namenodeInfoPortStr != null)
+      namenodeInfoPort = Integer.parseInt(namenodeInfoPortStr);
+
+    final DFSClient dfs = new DFSClient(datanode.getNameNodeAddr(),
+        JspHelper.conf);
+    String target = dir;
+    final FileStatus targetStatus = dfs.getFileInfo(target);
+    if (targetStatus == null) { // not exists
+      out.print("<h3>File or directory : " + target + " does not exist</h3>");
+      JspHelper.printGotoForm(out, namenodeInfoPort, target);
+    } else {
+      if (!targetStatus.isDir()) { // a file
+        List<LocatedBlock> blocks = dfs.namenode.getBlockLocations(dir, 0, 1)
+            .getLocatedBlocks();
+
+        LocatedBlock firstBlock = null;
+        DatanodeInfo[] locations = null;
+        if (blocks.size() > 0) {
+          firstBlock = blocks.get(0);
+          locations = firstBlock.getLocations();
+        }
+        if (locations == null || locations.length == 0) {
+          out.print("Empty file");
+        } else {
+          DatanodeInfo chosenNode = JspHelper.bestNode(firstBlock);
+          String fqdn = InetAddress.getByName(chosenNode.getHost())
+              .getCanonicalHostName();
+          String datanodeAddr = chosenNode.getName();
+          int datanodePort = Integer.parseInt(datanodeAddr.substring(
+              datanodeAddr.indexOf(':') + 1, datanodeAddr.length()));
+          String redirectLocation = "http://" + fqdn + ":"
+              + chosenNode.getInfoPort() + "/browseBlock.jsp?blockId="
+              + firstBlock.getBlock().getBlockId() + "&blockSize="
+              + firstBlock.getBlock().getNumBytes() + "&genstamp="
+              + firstBlock.getBlock().getGenerationStamp() + "&filename="
+              + URLEncoder.encode(dir, "UTF-8") + "&datanodePort="
+              + datanodePort + "&namenodeInfoPort=" + namenodeInfoPort;
+          resp.sendRedirect(redirectLocation);
+        }
+        return;
+      }
+      // directory
+      FileStatus[] files = dfs.listPaths(target);
+      // generate a table and dump the info
+      String[] headings = { "Name", "Type", "Size", "Replication",
+          "Block Size", "Modification Time", "Permission", "Owner", "Group" };
+      out.print("<h3>Contents of directory ");
+      JspHelper.printPathWithLinks(dir, out, namenodeInfoPort);
+      out.print("</h3><hr>");
+      JspHelper.printGotoForm(out, namenodeInfoPort, dir);
+      out.print("<hr>");
+
+      File f = new File(dir);
+      String parent;
+      if ((parent = f.getParent()) != null)
+        out.print("<a href=\"" + req.getRequestURL() + "?dir=" + parent
+            + "&namenodeInfoPort=" + namenodeInfoPort
+            + "\">Go to parent directory</a><br>");
+
+      if (files == null || files.length == 0) {
+        out.print("Empty directory");
+      } else {
+        JspHelper.addTableHeader(out);
+        int row = 0;
+        JspHelper.addTableRow(out, headings, row++);
+        String cols[] = new String[headings.length];
+        for (int i = 0; i < files.length; i++) {
+          // Get the location of the first block of the file
+          if (files[i].getPath().toString().endsWith(".crc"))
+            continue;
+          if (!files[i].isDir()) {
+            cols[1] = "file";
+            cols[2] = StringUtils.byteDesc(files[i].getLen());
+            cols[3] = Short.toString(files[i].getReplication());
+            cols[4] = StringUtils.byteDesc(files[i].getBlockSize());
+          } else {
+            cols[1] = "dir";
+            cols[2] = "";
+            cols[3] = "";
+            cols[4] = "";
+          }
+          String datanodeUrl = req.getRequestURL() + "?dir="
+              + URLEncoder.encode(files[i].getPath().toString(), "UTF-8")
+              + "&namenodeInfoPort=" + namenodeInfoPort;
+          cols[0] = "<a href=\"" + datanodeUrl + "\">"
+              + files[i].getPath().getName() + "</a>";
+          cols[5] = FsShell.dateForm.format(new Date((files[i]
+              .getModificationTime())));
+          cols[6] = files[i].getPermission().toString();
+          cols[7] = files[i].getOwner();
+          cols[8] = files[i].getGroup();
+          JspHelper.addTableRow(out, cols, row++);
+        }
+        JspHelper.addTableFooter(out);
+      }
+    }
+    String namenodeHost = datanode.getNameNodeAddr().getHostName();
+    out.print("<br><a href=\"http://"
+        + InetAddress.getByName(namenodeHost).getCanonicalHostName() + ":"
+        + namenodeInfoPort + "/dfshealth.jsp\">Go back to DFS home</a>");
+    dfs.close();
+  }
+
+  static void generateFileDetails(JspWriter out, HttpServletRequest req)
+      throws IOException {
+
+    long startOffset = 0;
+    int datanodePort;
+
+    final Long blockId = JspHelper.validateLong(req.getParameter("blockId"));
+    if (blockId == null) {
+      out.print("Invalid input (blockId absent)");
+      return;
+    }
+
+    String datanodePortStr = req.getParameter("datanodePort");
+    if (datanodePortStr == null) {
+      out.print("Invalid input (datanodePort absent)");
+      return;
+    }
+    datanodePort = Integer.parseInt(datanodePortStr);
+
+    String namenodeInfoPortStr = req.getParameter("namenodeInfoPort");
+    int namenodeInfoPort = -1;
+    if (namenodeInfoPortStr != null)
+      namenodeInfoPort = Integer.parseInt(namenodeInfoPortStr);
+
+    final int chunkSizeToView = JspHelper.string2ChunkSizeToView(
+        req.getParameter("chunkSizeToView"));
+
+    String startOffsetStr = req.getParameter("startOffset");
+    if (startOffsetStr == null || Long.parseLong(startOffsetStr) < 0)
+      startOffset = 0;
+    else
+      startOffset = Long.parseLong(startOffsetStr);
+
+    final String filename=JspHelper.validatePath(req.getParameter("filename"));
+    if (filename == null) {
+      out.print("Invalid input");
+      return;
+    }
+
+    String blockSizeStr = req.getParameter("blockSize");
+    long blockSize = 0;
+    if (blockSizeStr == null || blockSizeStr.length() == 0) {
+      out.print("Invalid input");
+      return;
+    }
+    blockSize = Long.parseLong(blockSizeStr);
+
+    final DFSClient dfs = new DFSClient(datanode.getNameNodeAddr(),
+        JspHelper.conf);
+    List<LocatedBlock> blocks = dfs.namenode.getBlockLocations(filename, 0,
+        Long.MAX_VALUE).getLocatedBlocks();
+    // Add the various links for looking at the file contents
+    // URL for downloading the full file
+    String downloadUrl = "http://" + req.getServerName() + ":"
+        + req.getServerPort() + "/streamFile?" + "filename="
+        + URLEncoder.encode(filename, "UTF-8");
+    out.print("<a name=\"viewOptions\"></a>");
+    out.print("<a href=\"" + downloadUrl + "\">Download this file</a><br>");
+
+    DatanodeInfo chosenNode;
+    // URL for TAIL
+    LocatedBlock lastBlk = blocks.get(blocks.size() - 1);
+    try {
+      chosenNode = JspHelper.bestNode(lastBlk);
+    } catch (IOException e) {
+      out.print(e.toString());
+      dfs.close();
+      return;
+    }
+    String fqdn = InetAddress.getByName(chosenNode.getHost())
+        .getCanonicalHostName();
+    String tailUrl = "http://" + fqdn + ":" + chosenNode.getInfoPort()
+        + "/tail.jsp?filename=" + URLEncoder.encode(filename, "UTF-8")
+        + "&namenodeInfoPort=" + namenodeInfoPort
+        + "&chunkSizeToView=" + chunkSizeToView
+        + "&referrer=" + URLEncoder.encode(
+            req.getRequestURL() + "?" + req.getQueryString(), "UTF-8");
+    out.print("<a href=\"" + tailUrl + "\">Tail this file</a><br>");
+
+    out.print("<form action=\"/browseBlock.jsp\" method=GET>");
+    out.print("<b>Chunk size to view (in bytes, up to file's DFS block size): </b>");
+    out.print("<input type=\"hidden\" name=\"blockId\" value=\"" + blockId
+        + "\">");
+    out.print("<input type=\"hidden\" name=\"blockSize\" value=\"" + blockSize
+        + "\">");
+    out.print("<input type=\"hidden\" name=\"startOffset\" value=\""
+        + startOffset + "\">");
+    out.print("<input type=\"hidden\" name=\"filename\" value=\"" + filename
+        + "\">");
+    out.print("<input type=\"hidden\" name=\"datanodePort\" value=\""
+        + datanodePort + "\">");
+    out.print("<input type=\"hidden\" name=\"namenodeInfoPort\" value=\""
+        + namenodeInfoPort + "\">");
+    out.print("<input type=\"text\" name=\"chunkSizeToView\" value="
+        + chunkSizeToView + " size=10 maxlength=10>");
+    out.print("&nbsp;&nbsp;<input type=\"submit\" name=\"submit\" value=\"Refresh\">");
+    out.print("</form>");
+    out.print("<hr>");
+    out.print("<a name=\"blockDetails\"></a>");
+    out.print("<B>Total number of blocks: " + blocks.size() + "</B><br>");
+    // generate a table and dump the info
+    out.println("\n<table>");
+    for (LocatedBlock cur : blocks) {
+      out.print("<tr>");
+      final String blockidstring = Long.toString(cur.getBlock().getBlockId());
+      blockSize = cur.getBlock().getNumBytes();
+      out.print("<td>" + blockidstring + ":</td>");
+      DatanodeInfo[] locs = cur.getLocations();
+      for (int j = 0; j < locs.length; j++) {
+        String datanodeAddr = locs[j].getName();
+        datanodePort = Integer.parseInt(datanodeAddr.substring(datanodeAddr
+            .indexOf(':') + 1, datanodeAddr.length()));
+        fqdn = InetAddress.getByName(locs[j].getHost()).getCanonicalHostName();
+        String blockUrl = "http://" + fqdn + ":" + locs[j].getInfoPort()
+            + "/browseBlock.jsp?blockId=" + blockidstring
+            + "&blockSize=" + blockSize
+            + "&filename=" + URLEncoder.encode(filename, "UTF-8")
+            + "&datanodePort=" + datanodePort
+            + "&genstamp=" + cur.getBlock().getGenerationStamp()
+            + "&namenodeInfoPort=" + namenodeInfoPort
+            + "&chunkSizeToView=" + chunkSizeToView;
+        out.print("<td>&nbsp</td><td><a href=\"" + blockUrl + "\">"
+            + datanodeAddr + "</a></td>");
+      }
+      out.println("</tr>");
+    }
+    out.println("</table>");
+    out.print("<hr>");
+    String namenodeHost = datanode.getNameNodeAddr().getHostName();
+    out.print("<br><a href=\"http://"
+        + InetAddress.getByName(namenodeHost).getCanonicalHostName() + ":"
+        + namenodeInfoPort + "/dfshealth.jsp\">Go back to DFS home</a>");
+    dfs.close();
+  }
+
+  static void generateFileChunks(JspWriter out, HttpServletRequest req)
+      throws IOException {
+    long startOffset = 0;
+    int datanodePort = 0;
+
+    String namenodeInfoPortStr = req.getParameter("namenodeInfoPort");
+    int namenodeInfoPort = -1;
+    if (namenodeInfoPortStr != null)
+      namenodeInfoPort = Integer.parseInt(namenodeInfoPortStr);
+
+    final String filename = JspHelper
+        .validatePath(req.getParameter("filename"));
+    if (filename == null) {
+      out.print("Invalid input (filename absent)");
+      return;
+    }
+
+    final Long blockId = JspHelper.validateLong(req.getParameter("blockId"));
+    if (blockId == null) {
+      out.print("Invalid input (blockId absent)");
+      return;
+    }
+
+    final DFSClient dfs = new DFSClient(datanode.getNameNodeAddr(),
+        JspHelper.conf);
+
+    AccessToken accessToken = AccessToken.DUMMY_TOKEN;
+    if (JspHelper.conf.getBoolean(
+        AccessTokenHandler.STRING_ENABLE_ACCESS_TOKEN, false)) {
+      List<LocatedBlock> blks = dfs.namenode.getBlockLocations(filename, 0,
+          Long.MAX_VALUE).getLocatedBlocks();
+      if (blks == null || blks.size() == 0) {
+        out.print("Can't locate file blocks");
+        dfs.close();
+        return;
+      }
+      for (int i = 0; i < blks.size(); i++) {
+        if (blks.get(i).getBlock().getBlockId() == blockId) {
+          accessToken = blks.get(i).getAccessToken();
+          break;
+        }
+      }
+    }
+
+    final Long genStamp = JspHelper.validateLong(req.getParameter("genstamp"));
+    if (genStamp == null) {
+      out.print("Invalid input (genstamp absent)");
+      return;
+    }
+
+    String blockSizeStr;
+    long blockSize = 0;
+    blockSizeStr = req.getParameter("blockSize");
+    if (blockSizeStr == null) {
+      out.print("Invalid input (blockSize absent)");
+      return;
+    }
+    blockSize = Long.parseLong(blockSizeStr);
+
+    final int chunkSizeToView = JspHelper.string2ChunkSizeToView(req
+        .getParameter("chunkSizeToView"));
+
+    String startOffsetStr = req.getParameter("startOffset");
+    if (startOffsetStr == null || Long.parseLong(startOffsetStr) < 0)
+      startOffset = 0;
+    else
+      startOffset = Long.parseLong(startOffsetStr);
+
+    String datanodePortStr = req.getParameter("datanodePort");
+    if (datanodePortStr == null) {
+      out.print("Invalid input (datanodePort absent)");
+      return;
+    }
+    datanodePort = Integer.parseInt(datanodePortStr);
+    out.print("<h3>File: ");
+    JspHelper.printPathWithLinks(filename, out, namenodeInfoPort);
+    out.print("</h3><hr>");
+    String parent = new File(filename).getParent();
+    JspHelper.printGotoForm(out, namenodeInfoPort, parent);
+    out.print("<hr>");
+    out.print("<a href=\"http://"
+        + req.getServerName() + ":" + req.getServerPort()
+        + "/browseDirectory.jsp?dir=" + URLEncoder.encode(parent, "UTF-8")
+        + "&namenodeInfoPort=" + namenodeInfoPort
+        + "\"><i>Go back to dir listing</i></a><br>");
+    out.print("<a href=\"#viewOptions\">Advanced view/download options</a><br>");
+    out.print("<hr>");
+
+    // Determine the prev & next blocks
+    long nextStartOffset = 0;
+    long nextBlockSize = 0;
+    String nextBlockIdStr = null;
+    String nextGenStamp = null;
+    String nextHost = req.getServerName();
+    int nextPort = req.getServerPort();
+    int nextDatanodePort = datanodePort;
+    // determine data for the next link
+    if (startOffset + chunkSizeToView >= blockSize) {
+      // we have to go to the next block from this point onwards
+      List<LocatedBlock> blocks = dfs.namenode.getBlockLocations(filename, 0,
+          Long.MAX_VALUE).getLocatedBlocks();
+      for (int i = 0; i < blocks.size(); i++) {
+        if (blocks.get(i).getBlock().getBlockId() == blockId) {
+          if (i != blocks.size() - 1) {
+            LocatedBlock nextBlock = blocks.get(i + 1);
+            nextBlockIdStr = Long.toString(nextBlock.getBlock().getBlockId());
+            nextGenStamp = Long.toString(nextBlock.getBlock()
+                .getGenerationStamp());
+            nextStartOffset = 0;
+            nextBlockSize = nextBlock.getBlock().getNumBytes();
+            DatanodeInfo d = JspHelper.bestNode(nextBlock);
+            String datanodeAddr = d.getName();
+            nextDatanodePort = Integer.parseInt(datanodeAddr.substring(
+                datanodeAddr.indexOf(':') + 1, datanodeAddr.length()));
+            nextHost = InetAddress.getByName(d.getHost())
+                .getCanonicalHostName();
+            nextPort = d.getInfoPort();
+          }
+        }
+      }
+    } else {
+      // we are in the same block
+      nextBlockIdStr = blockId.toString();
+      nextStartOffset = startOffset + chunkSizeToView;
+      nextBlockSize = blockSize;
+      nextGenStamp = genStamp.toString();
+    }
+    String nextUrl = null;
+    if (nextBlockIdStr != null) {
+      nextUrl = "http://" + nextHost + ":" + nextPort
+          + "/browseBlock.jsp?blockId=" + nextBlockIdStr
+          + "&blockSize=" + nextBlockSize
+          + "&startOffset=" + nextStartOffset
+          + "&genstamp=" + nextGenStamp
+          + "&filename=" + URLEncoder.encode(filename, "UTF-8")
+          + "&chunkSizeToView=" + chunkSizeToView
+          + "&datanodePort=" + nextDatanodePort
+          + "&namenodeInfoPort=" + namenodeInfoPort;
+      out.print("<a href=\"" + nextUrl + "\">View Next chunk</a>&nbsp;&nbsp;");
+    }
+    // determine data for the prev link
+    String prevBlockIdStr = null;
+    String prevGenStamp = null;
+    long prevStartOffset = 0;
+    long prevBlockSize = 0;
+    String prevHost = req.getServerName();
+    int prevPort = req.getServerPort();
+    int prevDatanodePort = datanodePort;
+    if (startOffset == 0) {
+      List<LocatedBlock> blocks = dfs.namenode.getBlockLocations(filename, 0,
+          Long.MAX_VALUE).getLocatedBlocks();
+      for (int i = 0; i < blocks.size(); i++) {
+        if (blocks.get(i).getBlock().getBlockId() == blockId) {
+          if (i != 0) {
+            LocatedBlock prevBlock = blocks.get(i - 1);
+            prevBlockIdStr = Long.toString(prevBlock.getBlock().getBlockId());
+            prevGenStamp = Long.toString(prevBlock.getBlock()
+                .getGenerationStamp());
+            prevStartOffset = prevBlock.getBlock().getNumBytes()
+                - chunkSizeToView;
+            if (prevStartOffset < 0)
+              prevStartOffset = 0;
+            prevBlockSize = prevBlock.getBlock().getNumBytes();
+            DatanodeInfo d = JspHelper.bestNode(prevBlock);
+            String datanodeAddr = d.getName();
+            prevDatanodePort = Integer.parseInt(datanodeAddr.substring(
+                datanodeAddr.indexOf(':') + 1, datanodeAddr.length()));
+            prevHost = InetAddress.getByName(d.getHost())
+                .getCanonicalHostName();
+            prevPort = d.getInfoPort();
+          }
+        }
+      }
+    } else {
+      // we are in the same block
+      prevBlockIdStr = blockId.toString();
+      prevStartOffset = startOffset - chunkSizeToView;
+      if (prevStartOffset < 0)
+        prevStartOffset = 0;
+      prevBlockSize = blockSize;
+      prevGenStamp = genStamp.toString();
+    }
+
+    String prevUrl = null;
+    if (prevBlockIdStr != null) {
+      prevUrl = "http://" + prevHost + ":" + prevPort
+          + "/browseBlock.jsp?blockId=" + prevBlockIdStr
+          + "&blockSize=" + prevBlockSize
+          + "&startOffset=" + prevStartOffset
+          + "&filename=" + URLEncoder.encode(filename, "UTF-8")
+          + "&chunkSizeToView=" + chunkSizeToView
+          + "&genstamp=" + prevGenStamp
+          + "&datanodePort=" + prevDatanodePort
+          + "&namenodeInfoPort=" + namenodeInfoPort;
+      out.print("<a href=\"" + prevUrl + "\">View Prev chunk</a>&nbsp;&nbsp;");
+    }
+    out.print("<hr>");
+    out.print("<textarea cols=\"100\" rows=\"25\" wrap=\"virtual\" style=\"width:100%\" READONLY>");
+    try {
+      JspHelper.streamBlockInAscii(new InetSocketAddress(req.getServerName(),
+          datanodePort), blockId, accessToken, genStamp, blockSize,
+          startOffset, chunkSizeToView, out);
+    } catch (Exception e) {
+      out.print(e);
+    }
+    out.print("</textarea>");
+    dfs.close();
+  }
+
+  static void generateFileChunksForTail(JspWriter out, HttpServletRequest req)
+      throws IOException {
+    final String referrer = JspHelper.validateURL(req.getParameter("referrer"));
+    boolean noLink = false;
+    if (referrer == null) {
+      noLink = true;
+    }
+
+    final String filename = JspHelper
+        .validatePath(req.getParameter("filename"));
+    if (filename == null) {
+      out.print("Invalid input (file name absent)");
+      return;
+    }
+
+    String namenodeInfoPortStr = req.getParameter("namenodeInfoPort");
+    int namenodeInfoPort = -1;
+    if (namenodeInfoPortStr != null)
+      namenodeInfoPort = Integer.parseInt(namenodeInfoPortStr);
+
+    final int chunkSizeToView = JspHelper.string2ChunkSizeToView(req
+        .getParameter("chunkSizeToView"));
+
+    if (!noLink) {
+      out.print("<h3>Tail of File: ");
+      JspHelper.printPathWithLinks(filename, out, namenodeInfoPort);
+      out.print("</h3><hr>");
+      out.print("<a href=\"" + referrer + "\">Go Back to File View</a><hr>");
+    } else {
+      out.print("<h3>" + filename + "</h3>");
+    }
+    out.print("<b>Chunk size to view (in bytes, up to file's DFS block size): </b>");
+    out.print("<input type=\"text\" name=\"chunkSizeToView\" value="
+        + chunkSizeToView + " size=10 maxlength=10>");
+    out.print("&nbsp;&nbsp;<input type=\"submit\" name=\"submit\" value=\"Refresh\"><hr>");
+    out.print("<input type=\"hidden\" name=\"filename\" value=\"" + filename
+        + "\">");
+    out.print("<input type=\"hidden\" name=\"namenodeInfoPort\" value=\""
+        + namenodeInfoPort + "\">");
+    if (!noLink)
+      out.print("<input type=\"hidden\" name=\"referrer\" value=\"" + referrer
+          + "\">");
+
+    // fetch the block from the datanode that has the last block for this file
+    final DFSClient dfs = new DFSClient(datanode.getNameNodeAddr(),
+        JspHelper.conf);
+    List<LocatedBlock> blocks = dfs.namenode.getBlockLocations(filename, 0,
+        Long.MAX_VALUE).getLocatedBlocks();
+    if (blocks == null || blocks.size() == 0) {
+      out.print("No datanodes contain blocks of file " + filename);
+      dfs.close();
+      return;
+    }
+    LocatedBlock lastBlk = blocks.get(blocks.size() - 1);
+    long blockSize = lastBlk.getBlock().getNumBytes();
+    long blockId = lastBlk.getBlock().getBlockId();
+    AccessToken accessToken = lastBlk.getAccessToken();
+    long genStamp = lastBlk.getBlock().getGenerationStamp();
+    DatanodeInfo chosenNode;
+    try {
+      chosenNode = JspHelper.bestNode(lastBlk);
+    } catch (IOException e) {
+      out.print(e.toString());
+      dfs.close();
+      return;
+    }
+    InetSocketAddress addr = NetUtils.createSocketAddr(chosenNode.getName());
+    // view the last chunkSizeToView bytes while Tailing
+    final long startOffset = blockSize >= chunkSizeToView ? blockSize
+        - chunkSizeToView : 0;
+
+    out.print("<textarea cols=\"100\" rows=\"25\" wrap=\"virtual\" style=\"width:100%\" READONLY>");
+    JspHelper.streamBlockInAscii(addr, blockId, accessToken, genStamp,
+        blockSize, startOffset, chunkSizeToView, out);
+    out.print("</textarea>");
+    dfs.close();
+  }
+}
\ No newline at end of file

Modified: hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/DfsServlet.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/DfsServlet.java?rev=776490&r1=776489&r2=776490&view=diff
==============================================================================
--- hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/DfsServlet.java (original)
+++ hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/DfsServlet.java Tue May 19 22:37:21 2009
@@ -34,6 +34,7 @@
 import org.apache.hadoop.hdfs.protocol.ClientProtocol;
 import org.apache.hadoop.hdfs.protocol.DatanodeID;
 import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
+import org.apache.hadoop.hdfs.server.common.JspHelper;
 import org.apache.hadoop.security.UnixUserGroupInformation;
 import org.apache.hadoop.security.UserGroupInformation;
 

Modified: hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/FileDataServlet.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/FileDataServlet.java?rev=776490&r1=776489&r2=776490&view=diff
==============================================================================
--- hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/FileDataServlet.java (original)
+++ hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/FileDataServlet.java Tue May 19 22:37:21 2009
@@ -28,6 +28,7 @@
 import org.apache.hadoop.hdfs.protocol.DatanodeID;
 import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
 import org.apache.hadoop.hdfs.protocol.LocatedBlocks;
+import org.apache.hadoop.hdfs.server.common.JspHelper;
 import org.apache.hadoop.security.UnixUserGroupInformation;
 
 /** Redirect queries about the hosted filesystem to an appropriate datanode.

Added: hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/NamenodeJspHelper.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/NamenodeJspHelper.java?rev=776490&view=auto
==============================================================================
--- hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/NamenodeJspHelper.java (added)
+++ hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/NamenodeJspHelper.java Tue May 19 22:37:21 2009
@@ -0,0 +1,453 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdfs.server.namenode;
+
+import java.io.IOException;
+import java.net.InetAddress;
+import java.net.URLEncoder;
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
+
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+import javax.servlet.jsp.JspWriter;
+
+import org.apache.hadoop.hdfs.protocol.FSConstants.UpgradeAction;
+import org.apache.hadoop.hdfs.server.common.JspHelper;
+import org.apache.hadoop.hdfs.server.common.Storage;
+import org.apache.hadoop.hdfs.server.common.UpgradeStatusReport;
+import org.apache.hadoop.hdfs.server.common.Storage.StorageDirectory;
+import org.apache.hadoop.util.ServletUtil;
+import org.apache.hadoop.util.StringUtils;
+import org.apache.hadoop.util.VersionInfo;
+
+class NamenodeJspHelper {
+  static String getSafeModeText(FSNamesystem fsn) {
+    if (!fsn.isInSafeMode())
+      return "";
+    return "Safe mode is ON. <em>" + fsn.getSafeModeTip() + "</em><br>";
+  }
+
+  static String getInodeLimitText(FSNamesystem fsn) {
+    long inodes = fsn.dir.totalInodes();
+    long blocks = fsn.getBlocksTotal();
+    long maxobjects = fsn.getMaxObjects();
+    long totalMemory = Runtime.getRuntime().totalMemory();
+    long maxMemory = Runtime.getRuntime().maxMemory();
+
+    long used = (totalMemory * 100) / maxMemory;
+
+    String str = inodes + " files and directories, " + blocks + " blocks = "
+        + (inodes + blocks) + " total";
+    if (maxobjects != 0) {
+      long pct = ((inodes + blocks) * 100) / maxobjects;
+      str += " / " + maxobjects + " (" + pct + "%)";
+    }
+    str += ".  Heap Size is " + StringUtils.byteDesc(totalMemory) + " / "
+        + StringUtils.byteDesc(maxMemory) + " (" + used + "%) <br>";
+    return str;
+  }
+
+  static String getUpgradeStatusText(FSNamesystem fsn) {
+    String statusText = "";
+    try {
+      UpgradeStatusReport status = fsn
+          .distributedUpgradeProgress(UpgradeAction.GET_STATUS);
+      statusText = (status == null ? "There are no upgrades in progress."
+          : status.getStatusText(false));
+    } catch (IOException e) {
+      statusText = "Upgrade status unknown.";
+    }
+    return statusText;
+  }
+
+  /** Return a table containing version information. */
+  static String getVersionTable(FSNamesystem fsn) {
+    return "<div id='dfstable'><table>"
+        + "\n  <tr><td id='col1'>Started:</td><td>" + fsn.getStartTime()
+        + "</td></tr>\n" + "\n  <tr><td id='col1'>Version:</td><td>"
+        + VersionInfo.getVersion() + ", " + VersionInfo.getRevision()
+        + "\n  <tr><td id='col1'>Compiled:</td><td>" + VersionInfo.getDate()
+        + " by " + VersionInfo.getUser() + " from " + VersionInfo.getBranch()
+        + "\n  <tr><td id='col1'>Upgrades:</td><td>"
+        + getUpgradeStatusText(fsn) + "\n</table></div>";
+  }
+
+  static String getWarningText(FSNamesystem fsn) {
+    // Ideally this should be displayed in RED
+    long missingBlocks = fsn.getMissingBlocksCount();
+    if (missingBlocks > 0) {
+      return "<br> WARNING :" + " There are about " + missingBlocks
+          + " missing blocks. Please check the log or run fsck. <br><br>";
+    }
+    return "";
+  }
+
+  static class HealthJsp {
+    private int rowNum = 0;
+    private int colNum = 0;
+    private String sorterField = null;
+    private String sorterOrder = null;
+
+    private String rowTxt() {
+      colNum = 0;
+      return "<tr class=\"" + (((rowNum++) % 2 == 0) ? "rowNormal" : "rowAlt")
+          + "\"> ";
+    }
+
+    private String colTxt() {
+      return "<td id=\"col" + ++colNum + "\"> ";
+    }
+
+    private void counterReset() {
+      colNum = 0;
+      rowNum = 0;
+    }
+
+    void generateConfReport(JspWriter out, NameNode nn,
+        HttpServletRequest request) throws IOException {
+      FSNamesystem fsn = nn.getNamesystem();
+      FSImage fsImage = fsn.getFSImage();
+      List<Storage.StorageDirectory> removedStorageDirs = fsImage
+          .getRemovedStorageDirs();
+
+      // FS Image storage configuration
+      out.print("<h3> " + nn.getRole() + " Storage: </h3>");
+      out.print("<div id=\"dfstable\"> <table border=1 cellpadding=10 cellspacing=0 title=\"NameNode Storage\">\n"
+              + "<thead><tr><td><b>Storage Directory</b></td><td><b>Type</b></td><td><b>State</b></td></tr></thead>");
+
+      StorageDirectory st = null;
+      for (Iterator<StorageDirectory> it = fsImage.dirIterator(); it.hasNext();) {
+        st = it.next();
+        String dir = "" + st.getRoot();
+        String type = "" + st.getStorageDirType();
+        out.print("<tr><td>" + dir + "</td><td>" + type
+            + "</td><td>Active</td></tr>");
+      }
+
+      long storageDirsSize = removedStorageDirs.size();
+      for (int i = 0; i < storageDirsSize; i++) {
+        st = removedStorageDirs.get(i);
+        String dir = "" + st.getRoot();
+        String type = "" + st.getStorageDirType();
+        out.print("<tr><td>" + dir + "</td><td>" + type
+            + "</td><td><font color=red>Failed</font></td></tr>");
+      }
+
+      out.print("</table></div><br>\n");
+    }
+
+    void generateHealthReport(JspWriter out, NameNode nn,
+        HttpServletRequest request) throws IOException {
+      FSNamesystem fsn = nn.getNamesystem();
+      ArrayList<DatanodeDescriptor> live = new ArrayList<DatanodeDescriptor>();
+      ArrayList<DatanodeDescriptor> dead = new ArrayList<DatanodeDescriptor>();
+      fsn.DFSNodesStatus(live, dead);
+
+      sorterField = request.getParameter("sorter/field");
+      sorterOrder = request.getParameter("sorter/order");
+      if (sorterField == null)
+        sorterField = "name";
+      if (sorterOrder == null)
+        sorterOrder = "ASC";
+
+      // Find out common suffix. Should this be before or after the sort?
+      String port_suffix = null;
+      if (live.size() > 0) {
+        String name = live.get(0).getName();
+        int idx = name.indexOf(':');
+        if (idx > 0) {
+          port_suffix = name.substring(idx);
+        }
+
+        for (int i = 1; port_suffix != null && i < live.size(); i++) {
+          if (live.get(i).getName().endsWith(port_suffix) == false) {
+            port_suffix = null;
+            break;
+          }
+        }
+      }
+
+      counterReset();
+      long[] fsnStats = fsn.getStats();
+      long total = fsnStats[0];
+      long remaining = fsnStats[2];
+      long used = fsnStats[1];
+      long nonDFS = total - remaining - used;
+      nonDFS = nonDFS < 0 ? 0 : nonDFS;
+      float percentUsed = total <= 0 ? 0f : ((float) used * 100.0f)
+          / (float) total;
+      float percentRemaining = total <= 0 ? 100f : ((float) remaining * 100.0f)
+          / (float) total;
+
+      out.print("<div id=\"dfstable\"> <table>\n" + rowTxt() + colTxt()
+          + "Configured Capacity" + colTxt() + ":" + colTxt()
+          + StringUtils.byteDesc(total) + rowTxt() + colTxt() + "DFS Used"
+          + colTxt() + ":" + colTxt() + StringUtils.byteDesc(used) + rowTxt()
+          + colTxt() + "Non DFS Used" + colTxt() + ":" + colTxt()
+          + StringUtils.byteDesc(nonDFS) + rowTxt() + colTxt()
+          + "DFS Remaining" + colTxt() + ":" + colTxt()
+          + StringUtils.byteDesc(remaining) + rowTxt() + colTxt() + "DFS Used%"
+          + colTxt() + ":" + colTxt()
+          + StringUtils.limitDecimalTo2(percentUsed) + " %" + rowTxt()
+          + colTxt() + "DFS Remaining%" + colTxt() + ":" + colTxt()
+          + StringUtils.limitDecimalTo2(percentRemaining) + " %" + rowTxt()
+          + colTxt()
+          + "<a href=\"dfsnodelist.jsp?whatNodes=LIVE\">Live Nodes</a> "
+          + colTxt() + ":" + colTxt() + live.size() + rowTxt() + colTxt()
+          + "<a href=\"dfsnodelist.jsp?whatNodes=DEAD\">Dead Nodes</a> "
+          + colTxt() + ":" + colTxt() + dead.size() + "</table></div><br>\n");
+
+      if (live.isEmpty() && dead.isEmpty()) {
+        out.print("There are no datanodes in the cluster");
+      }
+    }
+  }
+
+  static void redirectToRandomDataNode(NameNode nn, HttpServletResponse resp)
+      throws IOException {
+    FSNamesystem fsn = nn.getNamesystem();
+    String datanode = fsn.randomDataNode();
+    String redirectLocation;
+    String nodeToRedirect;
+    int redirectPort;
+    if (datanode != null) {
+      redirectPort = Integer.parseInt(datanode
+          .substring(datanode.indexOf(':') + 1));
+      nodeToRedirect = datanode.substring(0, datanode.indexOf(':'));
+    } else {
+      nodeToRedirect = nn.getHttpAddress().getHostName();
+      redirectPort = nn.getHttpAddress().getPort();
+    }
+    String fqdn = InetAddress.getByName(nodeToRedirect).getCanonicalHostName();
+    redirectLocation = "http://" + fqdn + ":" + redirectPort
+        + "/browseDirectory.jsp?namenodeInfoPort="
+        + nn.getHttpAddress().getPort() + "&dir="
+        + URLEncoder.encode("/", "UTF-8");
+    resp.sendRedirect(redirectLocation);
+  }
+
+  static class NodeListJsp {
+    private int rowNum = 0;
+
+    private long diskBytes = 1024 * 1024 * 1024;
+    private String diskByteStr = "GB";
+
+    private String sorterField = null;
+    private String sorterOrder = null;
+
+    private String whatNodes = "LIVE";
+
+    private String rowTxt() {
+      return "<tr class=\"" + (((rowNum++) % 2 == 0) ? "rowNormal" : "rowAlt")
+          + "\"> ";
+    }
+
+    private void counterReset() {
+      rowNum = 0;
+    }
+
+    private String nodeHeaderStr(String name) {
+      String ret = "class=header";
+      String order = "ASC";
+      if (name.equals(sorterField)) {
+        ret += sorterOrder;
+        if (sorterOrder.equals("ASC"))
+          order = "DSC";
+      }
+      ret += " onClick=\"window.document.location="
+          + "'/dfsnodelist.jsp?whatNodes=" + whatNodes + "&sorter/field="
+          + name + "&sorter/order=" + order
+          + "'\" title=\"sort on this column\"";
+
+      return ret;
+    }
+
+    void generateNodeData(JspWriter out, DatanodeDescriptor d,
+        String suffix, boolean alive, int nnHttpPort) throws IOException {
+      /*
+       * Say the datanode is dn1.hadoop.apache.org with ip 192.168.0.5 we use:
+       * 1) d.getHostName():d.getPort() to display. Domain and port are stripped
+       *    if they are common across the nodes. i.e. "dn1"
+       * 2) d.getHost():d.Port() for "title". i.e. "192.168.0.5:50010"
+       * 3) d.getHostName():d.getInfoPort() for url.
+       *    i.e. "http://dn1.hadoop.apache.org:50075/..."
+       * Note that "d.getHost():d.getPort()" is what DFS clients use to
+       * interact with datanodes.
+       */
+
+      // from nn_browsedfscontent.jsp:
+      String url = "http://" + d.getHostName() + ":" + d.getInfoPort()
+          + "/browseDirectory.jsp?namenodeInfoPort=" + nnHttpPort + "&dir="
+          + URLEncoder.encode("/", "UTF-8");
+
+      String name = d.getHostName() + ":" + d.getPort();
+      if (!name.matches("\\d+\\.\\d+.\\d+\\.\\d+.*"))
+        name = name.replaceAll("\\.[^.:]*", "");
+      int idx = (suffix != null && name.endsWith(suffix)) ? name
+          .indexOf(suffix) : -1;
+
+      out.print(rowTxt() + "<td class=\"name\"><a title=\"" + d.getHost() + ":"
+          + d.getPort() + "\" href=\"" + url + "\">"
+          + ((idx > 0) ? name.substring(0, idx) : name) + "</a>"
+          + ((alive) ? "" : "\n"));
+      if (!alive)
+        return;
+
+      long c = d.getCapacity();
+      long u = d.getDfsUsed();
+      long nu = d.getNonDfsUsed();
+      long r = d.getRemaining();
+      String percentUsed = StringUtils.limitDecimalTo2(d.getDfsUsedPercent());
+      String percentRemaining = StringUtils.limitDecimalTo2(d
+          .getRemainingPercent());
+
+      String adminState = (d.isDecommissioned() ? "Decommissioned" : (d
+          .isDecommissionInProgress() ? "Decommission In Progress"
+          : "In Service"));
+
+      long timestamp = d.getLastUpdate();
+      long currentTime = System.currentTimeMillis();
+      out.print("<td class=\"lastcontact\"> "
+          + ((currentTime - timestamp) / 1000)
+          + "<td class=\"adminstate\">"
+          + adminState
+          + "<td align=\"right\" class=\"capacity\">"
+          + StringUtils.limitDecimalTo2(c * 1.0 / diskBytes)
+          + "<td align=\"right\" class=\"used\">"
+          + StringUtils.limitDecimalTo2(u * 1.0 / diskBytes)
+          + "<td align=\"right\" class=\"nondfsused\">"
+          + StringUtils.limitDecimalTo2(nu * 1.0 / diskBytes)
+          + "<td align=\"right\" class=\"remaining\">"
+          + StringUtils.limitDecimalTo2(r * 1.0 / diskBytes)
+          + "<td align=\"right\" class=\"pcused\">"
+          + percentUsed
+          + "<td class=\"pcused\">"
+          + ServletUtil.percentageGraph((int) Double.parseDouble(percentUsed),
+              100) + "<td align=\"right\" class=\"pcremaining`\">"
+          + percentRemaining + "<td title=" + "\"blocks scheduled : "
+          + d.getBlocksScheduled() + "\" class=\"blocks\">" + d.numBlocks()
+          + "\n");
+    }
+
+    void generateNodesList(JspWriter out, NameNode nn,
+        HttpServletRequest request) throws IOException {
+      ArrayList<DatanodeDescriptor> live = new ArrayList<DatanodeDescriptor>();
+      ArrayList<DatanodeDescriptor> dead = new ArrayList<DatanodeDescriptor>();
+      nn.getNamesystem().DFSNodesStatus(live, dead);
+
+      whatNodes = request.getParameter("whatNodes"); // show only live or only
+                                                     // dead nodes
+      sorterField = request.getParameter("sorter/field");
+      sorterOrder = request.getParameter("sorter/order");
+      if (sorterField == null)
+        sorterField = "name";
+      if (sorterOrder == null)
+        sorterOrder = "ASC";
+
+      JspHelper.sortNodeList(live, sorterField, sorterOrder);
+      JspHelper.sortNodeList(dead, "name", "ASC");
+
+      // Find out common suffix. Should this be before or after the sort?
+      String port_suffix = null;
+      if (live.size() > 0) {
+        String name = live.get(0).getName();
+        int idx = name.indexOf(':');
+        if (idx > 0) {
+          port_suffix = name.substring(idx);
+        }
+
+        for (int i = 1; port_suffix != null && i < live.size(); i++) {
+          if (live.get(i).getName().endsWith(port_suffix) == false) {
+            port_suffix = null;
+            break;
+          }
+        }
+      }
+
+      counterReset();
+
+      try {
+        Thread.sleep(1000);
+      } catch (InterruptedException e) {
+      }
+
+      if (live.isEmpty() && dead.isEmpty()) {
+        out.print("There are no datanodes in the cluster");
+      } else {
+
+        int nnHttpPort = nn.getHttpAddress().getPort();
+        out.print("<div id=\"dfsnodetable\"> ");
+        if (whatNodes.equals("LIVE")) {
+
+          out.print("<a name=\"LiveNodes\" id=\"title\">" + "Live Datanodes : "
+              + live.size() + "</a>"
+              + "<br><br>\n<table border=1 cellspacing=0>\n");
+
+          counterReset();
+
+          if (live.size() > 0) {
+            if (live.get(0).getCapacity() > 1024 * diskBytes) {
+              diskBytes *= 1024;
+              diskByteStr = "TB";
+            }
+
+            out.print("<tr class=\"headerRow\"> <th " + nodeHeaderStr("name")
+                + "> Node <th " + nodeHeaderStr("lastcontact")
+                + "> Last <br>Contact <th " + nodeHeaderStr("adminstate")
+                + "> Admin State <th " + nodeHeaderStr("capacity")
+                + "> Configured <br>Capacity (" + diskByteStr + ") <th "
+                + nodeHeaderStr("used") + "> Used <br>(" + diskByteStr
+                + ") <th " + nodeHeaderStr("nondfsused")
+                + "> Non DFS <br>Used (" + diskByteStr + ") <th "
+                + nodeHeaderStr("remaining") + "> Remaining <br>("
+                + diskByteStr + ") <th " + nodeHeaderStr("pcused")
+                + "> Used <br>(%) <th " + nodeHeaderStr("pcused")
+                + "> Used <br>(%) <th " + nodeHeaderStr("pcremaining")
+                + "> Remaining <br>(%) <th " + nodeHeaderStr("blocks")
+                + "> Blocks\n");
+
+            JspHelper.sortNodeList(live, sorterField, sorterOrder);
+            for (int i = 0; i < live.size(); i++) {
+              generateNodeData(out, live.get(i), port_suffix, true, nnHttpPort);
+            }
+          }
+          out.print("</table>\n");
+        } else {
+
+          out.print("<br> <a name=\"DeadNodes\" id=\"title\"> "
+              + " Dead Datanodes : " + dead.size() + "</a><br><br>\n");
+
+          if (dead.size() > 0) {
+            out.print("<table border=1 cellspacing=0> <tr id=\"row1\"> "
+                + "<td> Node \n");
+
+            JspHelper.sortNodeList(dead, "name", "ASC");
+            for (int i = 0; i < dead.size(); i++) {
+              generateNodeData(out, dead.get(i), port_suffix, false, nnHttpPort);
+            }
+
+            out.print("</table>\n");
+          }
+        }
+        out.print("</div>");
+      }
+    }
+  }
+}
\ No newline at end of file

Modified: hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/StreamFile.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/StreamFile.java?rev=776490&r1=776489&r2=776490&view=diff
==============================================================================
--- hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/StreamFile.java (original)
+++ hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/StreamFile.java Tue May 19 22:37:21 2009
@@ -29,6 +29,7 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSInputStream;
 import org.apache.hadoop.hdfs.DFSClient;
+import org.apache.hadoop.hdfs.server.common.JspHelper;
 import org.apache.hadoop.hdfs.server.datanode.DataNode;
 import org.apache.hadoop.security.UnixUserGroupInformation;
 

Modified: hadoop/core/trunk/src/webapps/datanode/browseBlock.jsp
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/webapps/datanode/browseBlock.jsp?rev=776490&r1=776489&r2=776490&view=diff
==============================================================================
--- hadoop/core/trunk/src/webapps/datanode/browseBlock.jsp (original)
+++ hadoop/core/trunk/src/webapps/datanode/browseBlock.jsp Tue May 19 22:37:21 2009
@@ -26,375 +26,18 @@
   import="java.net.*"
 
   import="org.apache.hadoop.hdfs.*"
-  import="org.apache.hadoop.hdfs.server.namenode.*"
   import="org.apache.hadoop.hdfs.protocol.*"
-  import="org.apache.hadoop.security.AccessToken"
-  import="org.apache.hadoop.security.AccessTokenHandler"
+  import="org.apache.hadoop.hdfs.server.common.JspHelper"
   import="org.apache.hadoop.util.*"
 %>
-
-<%!
-  static final DataNode datanode = DataNode.getDataNode();
-
-  public void generateFileDetails(JspWriter out, HttpServletRequest req) 
-    throws IOException {
-
-    long startOffset = 0;
-    int datanodePort;
-
-    final Long blockId = JspHelper.validateLong(req.getParameter("blockId"));
-    if (blockId == null) {
-      out.print("Invalid input (blockId absent)");
-      return;
-    }
-
-    String datanodePortStr = req.getParameter("datanodePort");
-    if (datanodePortStr == null) {
-      out.print("Invalid input (datanodePort absent)");
-      return;
-    }
-    datanodePort = Integer.parseInt(datanodePortStr);
-
-    String namenodeInfoPortStr = req.getParameter("namenodeInfoPort");
-    int namenodeInfoPort = -1;
-    if (namenodeInfoPortStr != null)
-      namenodeInfoPort = Integer.parseInt(namenodeInfoPortStr);
-
-    final int chunkSizeToView = JspHelper.string2ChunkSizeToView(req.getParameter("chunkSizeToView"));
-
-    String startOffsetStr = req.getParameter("startOffset");
-    if (startOffsetStr == null || Long.parseLong(startOffsetStr) < 0)
-      startOffset = 0;
-    else startOffset = Long.parseLong(startOffsetStr);
-    
-    final String filename = JspHelper.validatePath(
-        req.getParameter("filename"));
-    if (filename == null) {
-      out.print("Invalid input");
-      return;
-    }
-
-    String blockSizeStr = req.getParameter("blockSize"); 
-    long blockSize = 0;
-    if (blockSizeStr == null || blockSizeStr.length() == 0) {
-      out.print("Invalid input");
-      return;
-    } 
-    blockSize = Long.parseLong(blockSizeStr);
-
-    final DFSClient dfs = new DFSClient(datanode.getNameNodeAddr(), JspHelper.conf);
-    List<LocatedBlock> blocks = 
-      dfs.namenode.getBlockLocations(filename, 0, Long.MAX_VALUE).getLocatedBlocks();
-    //Add the various links for looking at the file contents
-    //URL for downloading the full file
-    String downloadUrl = "http://" + req.getServerName() + ":" +
-                         + req.getServerPort() + "/streamFile?" + "filename=" +
-                         URLEncoder.encode(filename, "UTF-8");
-    out.print("<a name=\"viewOptions\"></a>");
-    out.print("<a href=\"" + downloadUrl + "\">Download this file</a><br>");
-    
-    DatanodeInfo chosenNode;
-    //URL for TAIL 
-    LocatedBlock lastBlk = blocks.get(blocks.size() - 1);
-    try {
-      chosenNode = JspHelper.bestNode(lastBlk);
-    } catch (IOException e) {
-      out.print(e.toString());
-      dfs.close();
-      return;
-    }
-    String fqdn = 
-           InetAddress.getByName(chosenNode.getHost()).getCanonicalHostName();
-    String tailUrl = "http://" + fqdn + ":" +
-                     chosenNode.getInfoPort() + 
-                 "/tail.jsp?filename=" + URLEncoder.encode(filename, "UTF-8") +
-                 "&namenodeInfoPort=" + namenodeInfoPort +
-                 "&chunkSizeToView=" + chunkSizeToView +
-                 "&referrer=" + 
-          URLEncoder.encode(req.getRequestURL() + "?" + req.getQueryString(),
-                            "UTF-8");
-    out.print("<a href=\"" + tailUrl + "\">Tail this file</a><br>");
-
-    out.print("<form action=\"/browseBlock.jsp\" method=GET>");
-    out.print("<b>Chunk size to view (in bytes, up to file's DFS block size): </b>");
-    out.print("<input type=\"hidden\" name=\"blockId\" value=\"" + blockId +
-              "\">");
-    out.print("<input type=\"hidden\" name=\"blockSize\" value=\"" + 
-              blockSize + "\">");
-    out.print("<input type=\"hidden\" name=\"startOffset\" value=\"" + 
-              startOffset + "\">");
-    out.print("<input type=\"hidden\" name=\"filename\" value=\"" + filename +
-              "\">");
-    out.print("<input type=\"hidden\" name=\"datanodePort\" value=\"" + 
-              datanodePort+ "\">");
-    out.print("<input type=\"hidden\" name=\"namenodeInfoPort\" value=\"" +
-              namenodeInfoPort + "\">");
-    out.print("<input type=\"text\" name=\"chunkSizeToView\" value=" +
-              chunkSizeToView + " size=10 maxlength=10>");
-    out.print("&nbsp;&nbsp;<input type=\"submit\" name=\"submit\" value=\"Refresh\">");
-    out.print("</form>");
-    out.print("<hr>"); 
-    out.print("<a name=\"blockDetails\"></a>");
-    out.print("<B>Total number of blocks: "+blocks.size()+"</B><br>");
-    //generate a table and dump the info
-    out.println("\n<table>");
-    for (LocatedBlock cur : blocks) {
-      out.print("<tr>");
-      final String blockidstring = Long.toString(cur.getBlock().getBlockId());
-      blockSize = cur.getBlock().getNumBytes();
-      out.print("<td>"+blockidstring+":</td>");
-      DatanodeInfo[] locs = cur.getLocations();
-      for(int j=0; j<locs.length; j++) {
-        String datanodeAddr = locs[j].getName();
-        datanodePort = Integer.parseInt(datanodeAddr.substring(
-                                        datanodeAddr.indexOf(':') + 1, 
-                                    datanodeAddr.length())); 
-        fqdn = InetAddress.getByName(locs[j].getHost()).getCanonicalHostName();
-        String blockUrl = "http://"+ fqdn + ":" +
-                        locs[j].getInfoPort() +
-                        "/browseBlock.jsp?blockId=" + blockidstring +
-                        "&blockSize=" + blockSize +
-               "&filename=" + URLEncoder.encode(filename, "UTF-8")+ 
-                        "&datanodePort=" + datanodePort + 
-                        "&genstamp=" + cur.getBlock().getGenerationStamp() + 
-                        "&namenodeInfoPort=" + namenodeInfoPort +
-                        "&chunkSizeToView=" + chunkSizeToView;
-        out.print("<td>&nbsp</td>" 
-          + "<td><a href=\"" + blockUrl + "\">" + datanodeAddr + "</a></td>");
-      }
-      out.println("</tr>");
-    }
-    out.println("</table>");
-    out.print("<hr>");
-    String namenodeHost = datanode.getNameNodeAddr().getHostName();
-    out.print("<br><a href=\"http://" + 
-              InetAddress.getByName(namenodeHost).getCanonicalHostName() + ":" +
-              namenodeInfoPort + "/dfshealth.jsp\">Go back to DFS home</a>");
-    dfs.close();
-  }
-
-  public void generateFileChunks(JspWriter out, HttpServletRequest req) 
-    throws IOException {
-    long startOffset = 0;
-    int datanodePort = 0; 
-
-    String namenodeInfoPortStr = req.getParameter("namenodeInfoPort");
-    int namenodeInfoPort = -1;
-    if (namenodeInfoPortStr != null)
-      namenodeInfoPort = Integer.parseInt(namenodeInfoPortStr);
-
-    final String filename = JspHelper.validatePath(
-        req.getParameter("filename"));
-    if (filename == null) {
-      out.print("Invalid input (filename absent)");
-      return;
-    }
-    
-    final Long blockId = JspHelper.validateLong(req.getParameter("blockId"));
-    if (blockId == null) {
-      out.print("Invalid input (blockId absent)");
-      return;
-    }
-
-    final DFSClient dfs = new DFSClient(datanode.getNameNodeAddr(), JspHelper.conf);
-    
-    AccessToken accessToken = AccessToken.DUMMY_TOKEN;
-    if (JspHelper.conf
-        .getBoolean(AccessTokenHandler.STRING_ENABLE_ACCESS_TOKEN, false)) {
-      List<LocatedBlock> blks = dfs.namenode.getBlockLocations(filename, 0,
-          Long.MAX_VALUE).getLocatedBlocks();
-      if (blks == null || blks.size() == 0) {
-        out.print("Can't locate file blocks");
-        dfs.close();
-        return;
-      }
-      for (int i = 0; i < blks.size(); i++) {
-        if (blks.get(i).getBlock().getBlockId() == blockId) {
-          accessToken = blks.get(i).getAccessToken();
-          break;
-        }
-      }
-    }
-    
-    final Long genStamp = JspHelper.validateLong(req.getParameter("genstamp"));
-    if (genStamp == null) {
-      out.print("Invalid input (genstamp absent)");
-      return;
-    }
-
-    String blockSizeStr;
-    long blockSize = 0;
-    blockSizeStr = req.getParameter("blockSize"); 
-    if (blockSizeStr == null) {
-      out.print("Invalid input (blockSize absent)");
-      return;
-    }
-    blockSize = Long.parseLong(blockSizeStr);
-    
-    final int chunkSizeToView = JspHelper.string2ChunkSizeToView(req.getParameter("chunkSizeToView"));
-
-    String startOffsetStr = req.getParameter("startOffset");
-    if (startOffsetStr == null || Long.parseLong(startOffsetStr) < 0)
-      startOffset = 0;
-    else startOffset = Long.parseLong(startOffsetStr);
-
-    String datanodePortStr = req.getParameter("datanodePort");
-    if (datanodePortStr == null) {
-      out.print("Invalid input (datanodePort absent)");
-      return;
-    }
-    datanodePort = Integer.parseInt(datanodePortStr);
-    out.print("<h3>File: ");
-    JspHelper.printPathWithLinks(filename, out, namenodeInfoPort);
-    out.print("</h3><hr>");
-    String parent = new File(filename).getParent();
-    JspHelper.printGotoForm(out, namenodeInfoPort, parent);
-    out.print("<hr>");
-    out.print("<a href=\"http://" + req.getServerName() + ":" + 
-              req.getServerPort() + 
-              "/browseDirectory.jsp?dir=" + 
-              URLEncoder.encode(parent, "UTF-8") +
-              "&namenodeInfoPort=" + namenodeInfoPort + 
-              "\"><i>Go back to dir listing</i></a><br>");
-    out.print("<a href=\"#viewOptions\">Advanced view/download options</a><br>");
-    out.print("<hr>");
-
-    //Determine the prev & next blocks
-    long nextStartOffset = 0;
-    long nextBlockSize = 0;
-    String nextBlockIdStr = null;
-    String nextGenStamp = null;
-    String nextHost = req.getServerName();
-    int nextPort = req.getServerPort();
-    int nextDatanodePort = datanodePort;
-    //determine data for the next link
-    if (startOffset + chunkSizeToView >= blockSize) {
-      //we have to go to the next block from this point onwards
-      List<LocatedBlock> blocks = 
-        dfs.namenode.getBlockLocations(filename, 0, Long.MAX_VALUE).getLocatedBlocks();
-      for (int i = 0; i < blocks.size(); i++) {
-        if (blocks.get(i).getBlock().getBlockId() == blockId) {
-          if (i != blocks.size() - 1) {
-            LocatedBlock nextBlock = blocks.get(i+1);
-            nextBlockIdStr = Long.toString(nextBlock.getBlock().getBlockId());
-            nextGenStamp = Long.toString(nextBlock.getBlock().getGenerationStamp());
-            nextStartOffset = 0;
-            nextBlockSize = nextBlock.getBlock().getNumBytes();
-            DatanodeInfo d = JspHelper.bestNode(nextBlock);
-            String datanodeAddr = d.getName();
-            nextDatanodePort = Integer.parseInt(
-                                      datanodeAddr.substring(
-                                           datanodeAddr.indexOf(':') + 1, 
-                                      datanodeAddr.length())); 
-            nextHost = InetAddress.getByName(d.getHost()).getCanonicalHostName();
-            nextPort = d.getInfoPort(); 
-          }
-        }
-      }
-    } 
-    else {
-      //we are in the same block
-      nextBlockIdStr = blockId.toString();
-      nextStartOffset = startOffset + chunkSizeToView;
-      nextBlockSize = blockSize;
-      nextGenStamp = genStamp.toString();
-    }
-    String nextUrl = null;
-    if (nextBlockIdStr != null) {
-      nextUrl = "http://" + nextHost + ":" + 
-                nextPort + 
-                "/browseBlock.jsp?blockId=" + nextBlockIdStr +
-                "&blockSize=" + nextBlockSize + "&startOffset=" + 
-                nextStartOffset + 
-                "&genstamp=" + nextGenStamp +
-                "&filename=" + URLEncoder.encode(filename, "UTF-8") +
-                "&chunkSizeToView=" + chunkSizeToView + 
-                "&datanodePort=" + nextDatanodePort +
-                "&namenodeInfoPort=" + namenodeInfoPort;
-      out.print("<a href=\"" + nextUrl + "\">View Next chunk</a>&nbsp;&nbsp;");        
-    }
-    //determine data for the prev link
-    String prevBlockIdStr = null;
-    String prevGenStamp = null;
-    long prevStartOffset = 0;
-    long prevBlockSize = 0;
-    String prevHost = req.getServerName();
-    int prevPort = req.getServerPort();
-    int prevDatanodePort = datanodePort;
-    if (startOffset == 0) {
-      List<LocatedBlock> blocks = 
-        dfs.namenode.getBlockLocations(filename, 0, Long.MAX_VALUE).getLocatedBlocks();
-      for (int i = 0; i < blocks.size(); i++) {
-        if (blocks.get(i).getBlock().getBlockId() == blockId) {
-          if (i != 0) {
-            LocatedBlock prevBlock = blocks.get(i-1);
-            prevBlockIdStr = Long.toString(prevBlock.getBlock().getBlockId());
-            prevGenStamp = Long.toString(prevBlock.getBlock().getGenerationStamp());
-            prevStartOffset = prevBlock.getBlock().getNumBytes() - chunkSizeToView;
-            if (prevStartOffset < 0)
-              prevStartOffset = 0;
-            prevBlockSize = prevBlock.getBlock().getNumBytes();
-            DatanodeInfo d = JspHelper.bestNode(prevBlock);
-            String datanodeAddr = d.getName();
-            prevDatanodePort = Integer.parseInt(
-                                      datanodeAddr.substring(
-                                          datanodeAddr.indexOf(':') + 1, 
-                                      datanodeAddr.length())); 
-            prevHost = InetAddress.getByName(d.getHost()).getCanonicalHostName();
-            prevPort = d.getInfoPort();
-          }
-        }
-      }
-    }
-    else {
-      //we are in the same block
-      prevBlockIdStr = blockId.toString();
-      prevStartOffset = startOffset - chunkSizeToView;
-      if (prevStartOffset < 0) prevStartOffset = 0;
-      prevBlockSize = blockSize;
-      prevGenStamp = genStamp.toString();
-    }
-
-    String prevUrl = null;
-    if (prevBlockIdStr != null) {
-      prevUrl = "http://" + prevHost + ":" + 
-                prevPort + 
-                "/browseBlock.jsp?blockId=" + prevBlockIdStr + 
-                "&blockSize=" + prevBlockSize + "&startOffset=" + 
-                prevStartOffset + 
-                "&filename=" + URLEncoder.encode(filename, "UTF-8") + 
-                "&chunkSizeToView=" + chunkSizeToView +
-                "&genstamp=" + prevGenStamp +
-                "&datanodePort=" + prevDatanodePort +
-                "&namenodeInfoPort=" + namenodeInfoPort;
-      out.print("<a href=\"" + prevUrl + "\">View Prev chunk</a>&nbsp;&nbsp;");
-    }
-    out.print("<hr>");
-    out.print("<textarea cols=\"100\" rows=\"25\" wrap=\"virtual\" style=\"width:100%\" READONLY>");
-    try {
-    JspHelper.streamBlockInAscii(
-            new InetSocketAddress(req.getServerName(), datanodePort), blockId, 
-            accessToken, genStamp, blockSize, startOffset, chunkSizeToView, out);
-    } catch (Exception e){
-        out.print(e);
-    }
-    out.print("</textarea>");
-    dfs.close();
-  }
-
-%>
 <html>
 <head>
 <%JspHelper.createTitle(out, request, request.getParameter("filename")); %>
 </head>
 <body onload="document.goto.dir.focus()">
-<% 
-   generateFileChunks(out,request);
-%>
+<% DatanodeJspHelper.generateFileChunks(out,request); %>
 <hr>
-<% 
-   generateFileDetails(out,request);
-%>
+<% DatanodeJspHelper.generateFileDetails(out,request); %>
 
 <h2>Local logs</h2>
 <a href="/logs/">Log</a> directory

Modified: hadoop/core/trunk/src/webapps/datanode/browseDirectory.jsp
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/webapps/datanode/browseDirectory.jsp?rev=776490&r1=776489&r2=776490&view=diff
==============================================================================
--- hadoop/core/trunk/src/webapps/datanode/browseDirectory.jsp (original)
+++ hadoop/core/trunk/src/webapps/datanode/browseDirectory.jsp Tue May 19 22:37:21 2009
@@ -27,133 +27,10 @@
 
   import="org.apache.hadoop.fs.*"
   import="org.apache.hadoop.hdfs.*"
-  import="org.apache.hadoop.hdfs.server.namenode.*"
   import="org.apache.hadoop.hdfs.protocol.*"
+  import="org.apache.hadoop.hdfs.server.common.JspHelper"
   import="org.apache.hadoop.util.*"
 %>
-<%!
-  static final DataNode datanode = DataNode.getDataNode();
-  
-  public void generateDirectoryStructure( JspWriter out, 
-                                          HttpServletRequest req,
-                                          HttpServletResponse resp) 
-    throws IOException {
-    final String dir = JspHelper.validatePath(req.getParameter("dir"));
-    if (dir == null) {
-      out.print("Invalid input");
-      return;
-    }
-    
-    String namenodeInfoPortStr = req.getParameter("namenodeInfoPort");
-    int namenodeInfoPort = -1;
-    if (namenodeInfoPortStr != null)
-      namenodeInfoPort = Integer.parseInt(namenodeInfoPortStr);
-    
-    final DFSClient dfs = new DFSClient(datanode.getNameNodeAddr(), JspHelper.conf);
-    String target = dir;
-    final FileStatus targetStatus = dfs.getFileInfo(target);
-    if (targetStatus == null) { // not exists
-      out.print("<h3>File or directory : " + target + " does not exist</h3>");
-      JspHelper.printGotoForm(out, namenodeInfoPort, target);
-    }
-    else {
-      if( !targetStatus.isDir() ) { // a file
-        List<LocatedBlock> blocks = 
-          dfs.namenode.getBlockLocations(dir, 0, 1).getLocatedBlocks();
-	      
-        LocatedBlock firstBlock = null;
-        DatanodeInfo [] locations = null;
-        if (blocks.size() > 0) {
-          firstBlock = blocks.get(0);
-          locations = firstBlock.getLocations();
-        }
-        if (locations == null || locations.length == 0) {
-          out.print("Empty file");
-        } else {
-          DatanodeInfo chosenNode = JspHelper.bestNode(firstBlock);
-          String fqdn = InetAddress.getByName(chosenNode.getHost()).
-            getCanonicalHostName();
-          String datanodeAddr = chosenNode.getName();
-          int datanodePort = Integer.parseInt(
-                                              datanodeAddr.substring(
-                                                                     datanodeAddr.indexOf(':') + 1, 
-                                                                     datanodeAddr.length())); 
-          String redirectLocation = "http://"+fqdn+":" +
-            chosenNode.getInfoPort() + 
-            "/browseBlock.jsp?blockId=" +
-            firstBlock.getBlock().getBlockId() +
-            "&blockSize=" + firstBlock.getBlock().getNumBytes() +
-            "&genstamp=" + firstBlock.getBlock().getGenerationStamp() +
-            "&filename=" + URLEncoder.encode(dir, "UTF-8") + 
-            "&datanodePort=" + datanodePort + 
-            "&namenodeInfoPort=" + namenodeInfoPort;
-          resp.sendRedirect(redirectLocation);
-        }
-        return;
-      }
-      // directory
-      FileStatus[] files = dfs.listPaths(target);
-      //generate a table and dump the info
-      String [] headings = { "Name", "Type", "Size", "Replication", 
-                              "Block Size", "Modification Time",
-                              "Permission", "Owner", "Group" };
-      out.print("<h3>Contents of directory ");
-      JspHelper.printPathWithLinks(dir, out, namenodeInfoPort);
-      out.print("</h3><hr>");
-      JspHelper.printGotoForm(out, namenodeInfoPort, dir);
-      out.print("<hr>");
-	
-      File f = new File(dir);
-      String parent;
-      if ((parent = f.getParent()) != null)
-        out.print("<a href=\"" + req.getRequestURL() + "?dir=" + parent +
-                  "&namenodeInfoPort=" + namenodeInfoPort +
-                  "\">Go to parent directory</a><br>");
-	
-      if (files == null || files.length == 0) {
-        out.print("Empty directory");
-      }
-      else {
-        JspHelper.addTableHeader(out);
-        int row=0;
-        JspHelper.addTableRow(out, headings, row++);
-        String cols [] = new String[headings.length];
-        for (int i = 0; i < files.length; i++) {
-          //Get the location of the first block of the file
-          if (files[i].getPath().toString().endsWith(".crc")) continue;
-          if (!files[i].isDir()) {
-            cols[1] = "file";
-            cols[2] = StringUtils.byteDesc(files[i].getLen());
-            cols[3] = Short.toString(files[i].getReplication());
-            cols[4] = StringUtils.byteDesc(files[i].getBlockSize());
-          }
-          else {
-            cols[1] = "dir";
-            cols[2] = "";
-            cols[3] = "";
-            cols[4] = "";
-          }
-          String datanodeUrl = req.getRequestURL()+"?dir="+
-              URLEncoder.encode(files[i].getPath().toString(), "UTF-8") + 
-              "&namenodeInfoPort=" + namenodeInfoPort;
-          cols[0] = "<a href=\""+datanodeUrl+"\">"+files[i].getPath().getName()+"</a>";
-          cols[5] = FsShell.dateForm.format(new Date((files[i].getModificationTime())));
-          cols[6] = files[i].getPermission().toString();
-          cols[7] = files[i].getOwner();
-          cols[8] = files[i].getGroup();
-          JspHelper.addTableRow(out, cols, row++);
-        }
-        JspHelper.addTableFooter(out);
-      }
-    } 
-    String namenodeHost = datanode.getNameNodeAddr().getHostName();
-    out.print("<br><a href=\"http://" + 
-              InetAddress.getByName(namenodeHost).getCanonicalHostName() + ":" +
-              namenodeInfoPort + "/dfshealth.jsp\">Go back to DFS home</a>");
-    dfs.close();
-  }
-
-%>
 
 <html>
 <head>
@@ -171,7 +48,7 @@
 <body onload="document.goto.dir.focus()">
 <% 
   try {
-    generateDirectoryStructure(out,request,response);
+    DatanodeJspHelper.generateDirectoryStructure(out,request,response);
   }
   catch(IOException ioe) {
     String msg = ioe.getLocalizedMessage();
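
The directory-listing scriptlet removed above is now reached as
DatanodeJspHelper.generateDirectoryStructure(out, request, response). Continuing the
sketch from the browseBlock.jsp hunk (still an inference from the deleted method and
the call site, with javax.servlet.http.HttpServletResponse added to the imports):

  // Renders the directory listing, or redirects to browseBlock.jsp
  // when the requested "dir" is actually a file.
  static void generateDirectoryStructure(JspWriter out,
      HttpServletRequest req, HttpServletResponse resp) throws IOException {
    // body moved here from browseDirectory.jsp's <%! ... %> block
  }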

Modified: hadoop/core/trunk/src/webapps/datanode/tail.jsp
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/webapps/datanode/tail.jsp?rev=776490&r1=776489&r2=776490&view=diff
==============================================================================
--- hadoop/core/trunk/src/webapps/datanode/tail.jsp (original)
+++ hadoop/core/trunk/src/webapps/datanode/tail.jsp Tue May 19 22:37:21 2009
@@ -30,100 +30,15 @@
   import="org.apache.hadoop.hdfs.protocol.*"
   import="org.apache.hadoop.security.AccessToken"
   import="org.apache.hadoop.util.*"
-  import="org.apache.hadoop.net.NetUtils"
+  import="org.apache.hadoop.hdfs.server.common.JspHelper"
 %>
-
-<%!
-  static final DataNode datanode = DataNode.getDataNode();
-
-  public void generateFileChunks(JspWriter out, HttpServletRequest req) 
-    throws IOException {
-    final String referrer = JspHelper.validateURL(req.getParameter("referrer"));
-    boolean noLink = false;
-    if (referrer == null) {
-      noLink = true;
-    }
-
-    final String filename = JspHelper.validatePath(
-        req.getParameter("filename"));
-    if (filename == null) {
-      out.print("Invalid input (file name absent)");
-      return;
-    }
-
-    String namenodeInfoPortStr = req.getParameter("namenodeInfoPort");
-    int namenodeInfoPort = -1;
-    if (namenodeInfoPortStr != null)
-      namenodeInfoPort = Integer.parseInt(namenodeInfoPortStr);
-    
-    final int chunkSizeToView = JspHelper.string2ChunkSizeToView(req.getParameter("chunkSizeToView"));
-
-    if (!noLink) {
-      out.print("<h3>Tail of File: ");
-      JspHelper.printPathWithLinks(filename, out, namenodeInfoPort);
-	    out.print("</h3><hr>");
-      out.print("<a href=\"" + referrer + "\">Go Back to File View</a><hr>");
-    }
-    else {
-      out.print("<h3>" + filename + "</h3>");
-    }
-    out.print("<b>Chunk size to view (in bytes, up to file's DFS block size): </b>");
-    out.print("<input type=\"text\" name=\"chunkSizeToView\" value=" +
-              chunkSizeToView + " size=10 maxlength=10>");
-    out.print("&nbsp;&nbsp;<input type=\"submit\" name=\"submit\" value=\"Refresh\"><hr>");
-    out.print("<input type=\"hidden\" name=\"filename\" value=\"" + filename +
-              "\">");
-    out.print("<input type=\"hidden\" name=\"namenodeInfoPort\" value=\"" + namenodeInfoPort +
-    "\">");
-    if (!noLink)
-      out.print("<input type=\"hidden\" name=\"referrer\" value=\"" + 
-                referrer+ "\">");
-
-    //fetch the block from the datanode that has the last block for this file
-    final DFSClient dfs = new DFSClient(datanode.getNameNodeAddr(), JspHelper.conf);
-    List<LocatedBlock> blocks = 
-      dfs.namenode.getBlockLocations(filename, 0, Long.MAX_VALUE).getLocatedBlocks();
-    if (blocks == null || blocks.size() == 0) {
-      out.print("No datanodes contain blocks of file "+filename);
-      dfs.close();
-      return;
-    }
-    LocatedBlock lastBlk = blocks.get(blocks.size() - 1);
-    long blockSize = lastBlk.getBlock().getNumBytes();
-    long blockId = lastBlk.getBlock().getBlockId();
-    AccessToken accessToken = lastBlk.getAccessToken();
-    long genStamp = lastBlk.getBlock().getGenerationStamp();
-    DatanodeInfo chosenNode;
-    try {
-      chosenNode = JspHelper.bestNode(lastBlk);
-    } catch (IOException e) {
-      out.print(e.toString());
-      dfs.close();
-      return;
-    }      
-    InetSocketAddress addr = NetUtils.createSocketAddr(chosenNode.getName());
-    //view the last chunkSizeToView bytes while Tailing
-    final long startOffset = blockSize >= chunkSizeToView? blockSize - chunkSizeToView: 0;
-
-    out.print("<textarea cols=\"100\" rows=\"25\" wrap=\"virtual\" style=\"width:100%\" READONLY>");
-    JspHelper.streamBlockInAscii(addr, blockId, accessToken, genStamp, blockSize, startOffset, chunkSizeToView, out);
-    out.print("</textarea>");
-    dfs.close();
-  }
-
-%>
-
-
-
 <html>
 <head>
 <%JspHelper.createTitle(out, request, request.getParameter("filename")); %>
 </head>
 <body>
 <form action="/tail.jsp" method="GET">
-<% 
-   generateFileChunks(out,request);
-%>
+<% DatanodeJspHelper.generateFileChunksForTail(out,request); %>
 </form>
 <hr>
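
The tail.jsp helper also picks up a new name in the move: the deleted generateFileChunks
above is now called as DatanodeJspHelper.generateFileChunksForTail(out, request),
presumably to keep it distinct from the browseBlock.jsp method of the same name. Its
assumed shape, continuing the same sketch:

  // Streams the last chunkSizeToView bytes of the file's final block.
  static void generateFileChunksForTail(JspWriter out, HttpServletRequest req)
      throws IOException {
    // body moved here from tail.jsp's <%! ... %> block
  }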