You are viewing a plain-text version of this content; the canonical (HTML) version is available at the original mailing-list archive link.
Posted to hdfs-commits@hadoop.apache.org by ji...@apache.org on 2011/06/17 20:42:23 UTC

svn commit: r1136975 - in /hadoop/common/trunk/hdfs: CHANGES.txt src/java/org/apache/hadoop/hdfs/server/datanode/DatanodeJspHelper.java src/test/hdfs/org/apache/hadoop/hdfs/server/datanode/TestDatanodeJsp.java

Author: jitendra
Date: Fri Jun 17 18:42:23 2011
New Revision: 1136975

URL: http://svn.apache.org/viewvc?rev=1136975&view=rev
Log:
HDFS-1734. 'Chunk size to view' option is not working in Name Node UI. Contributed by Uma Maheswara Rao G.

Modified:
    hadoop/common/trunk/hdfs/CHANGES.txt
    hadoop/common/trunk/hdfs/src/java/org/apache/hadoop/hdfs/server/datanode/DatanodeJspHelper.java
    hadoop/common/trunk/hdfs/src/test/hdfs/org/apache/hadoop/hdfs/server/datanode/TestDatanodeJsp.java

Modified: hadoop/common/trunk/hdfs/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hdfs/CHANGES.txt?rev=1136975&r1=1136974&r2=1136975&view=diff
==============================================================================
--- hadoop/common/trunk/hdfs/CHANGES.txt (original)
+++ hadoop/common/trunk/hdfs/CHANGES.txt Fri Jun 17 18:42:23 2011
@@ -754,6 +754,9 @@ Trunk (unreleased changes)
     HDFS-1692. In secure mode, Datanode process doesn't exit when disks 
     fail. (Bharath Mundlapudi via suresh)
 
+    HDFS-1734. 'Chunk size to view' option is not working in Name Node UI.
+    (Uma Maheswara Rao G via jitendra)
+
 Release 0.22.0 - Unreleased
 
   INCOMPATIBLE CHANGES

Modified: hadoop/common/trunk/hdfs/src/java/org/apache/hadoop/hdfs/server/datanode/DatanodeJspHelper.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hdfs/src/java/org/apache/hadoop/hdfs/server/datanode/DatanodeJspHelper.java?rev=1136975&r1=1136974&r2=1136975&view=diff
==============================================================================
--- hadoop/common/trunk/hdfs/src/java/org/apache/hadoop/hdfs/server/datanode/DatanodeJspHelper.java (original)
+++ hadoop/common/trunk/hdfs/src/java/org/apache/hadoop/hdfs/server/datanode/DatanodeJspHelper.java Fri Jun 17 18:42:23 2011
@@ -241,6 +241,11 @@ public class DatanodeJspHelper {
     }
     datanodePort = Integer.parseInt(datanodePortStr);
 
+    final Long genStamp = JspHelper.validateLong(req.getParameter("genstamp"));
+    if (genStamp == null) {
+      out.print("Invalid input (genstamp absent)");
+      return;
+    }
     String namenodeInfoPortStr = req.getParameter("namenodeInfoPort");
     int namenodeInfoPort = -1;
     if (namenodeInfoPortStr != null)
@@ -322,6 +327,8 @@ public class DatanodeJspHelper {
         + startOffset + "\">");
     out.print("<input type=\"hidden\" name=\"filename\" value=\"" + filename
         + "\">");
+    out.print("<input type=\"hidden\" name=\"genstamp\" value=\"" + genStamp
+        + "\">");
     out.print("<input type=\"hidden\" name=\"datanodePort\" value=\""
         + datanodePort + "\">");
     out.print("<input type=\"hidden\" name=\"namenodeInfoPort\" value=\""

Modified: hadoop/common/trunk/hdfs/src/test/hdfs/org/apache/hadoop/hdfs/server/datanode/TestDatanodeJsp.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hdfs/src/test/hdfs/org/apache/hadoop/hdfs/server/datanode/TestDatanodeJsp.java?rev=1136975&r1=1136974&r2=1136975&view=diff
==============================================================================
--- hadoop/common/trunk/hdfs/src/test/hdfs/org/apache/hadoop/hdfs/server/datanode/TestDatanodeJsp.java (original)
+++ hadoop/common/trunk/hdfs/src/test/hdfs/org/apache/hadoop/hdfs/server/datanode/TestDatanodeJsp.java Fri Jun 17 18:42:23 2011
@@ -19,24 +19,30 @@ package org.apache.hadoop.hdfs.server.da
 
 import static org.junit.Assert.assertTrue;
 
+import java.io.DataOutputStream;
 import java.io.IOException;
 import java.net.InetSocketAddress;
 import java.net.URL;
 import java.net.URLEncoder;
 
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.jsp.JspWriter;
+
 import org.apache.commons.httpclient.util.URIUtil;
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.DFSTestUtil;
 import org.apache.hadoop.hdfs.HdfsConfiguration;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.hdfs.server.common.JspHelper;
+import org.apache.hadoop.hdfs.server.namenode.NameNode;
 import org.junit.Test;
+import org.mockito.Mockito;
 
 public class TestDatanodeJsp {
   
   private static final String FILE_DATA = "foo bar baz biz buz";
+  private static final HdfsConfiguration CONF = new HdfsConfiguration();
   
   private static void testViewingFile(MiniDFSCluster cluster, String filePath,
       boolean doTail) throws IOException {
@@ -74,8 +80,7 @@ public class TestDatanodeJsp {
   public void testViewFileJsp() throws IOException {
     MiniDFSCluster cluster = null;
     try {
-      Configuration conf = new HdfsConfiguration();
-      cluster = new MiniDFSCluster.Builder(conf).build();
+      cluster = new MiniDFSCluster.Builder(CONF).build();
       cluster.waitActive();
       
       testViewingFile(cluster, "/test-file", false);
@@ -92,5 +97,49 @@ public class TestDatanodeJsp {
       }
     }
   }
+  
+  @Test
+  public void testGenStamp() throws Exception {
+    MiniDFSCluster cluster = new MiniDFSCluster.Builder(CONF).numDataNodes(1)
+        .build();
+    try {
+      FileSystem fs = cluster.getFileSystem();
+      Path testFile = new Path("/test/mkdirs/TestchunkSizeToView");
+      writeFile(fs, testFile);
+      JspWriter writerMock = Mockito.mock(JspWriter.class);
+      HttpServletRequest reqMock = Mockito.mock(HttpServletRequest.class);
+      setTheMockExpectationsFromReq(testFile, reqMock);
+      DatanodeJspHelper.generateFileDetails(writerMock, reqMock, CONF);
+      Mockito.verify(writerMock, Mockito.atLeastOnce()).print(
+          "<input type=\"hidden\" name=\"genstamp\" value=\"987654321\">");
+    } finally {
+      cluster.shutdown();
+    }
+  }
+
+  private void setTheMockExpectationsFromReq(Path testFile,
+      HttpServletRequest reqMock) {
+    Mockito.doReturn("987654321").when(reqMock).getParameter("genstamp");
+    Mockito.doReturn("1234").when(reqMock).getParameter("blockId");
+    Mockito.doReturn("8081").when(reqMock).getParameter("datanodePort");
+    Mockito.doReturn("8080").when(reqMock).getParameter("namenodeInfoPort");
+    Mockito.doReturn("100").when(reqMock).getParameter("chunkSizeToView");
+    Mockito.doReturn("1").when(reqMock).getParameter("startOffset");
+    Mockito.doReturn("1024").when(reqMock).getParameter("blockSize");
+    Mockito.doReturn(NameNode.getHostPortString(NameNode.getAddress(CONF)))
+        .when(reqMock).getParameter("nnaddr");
+    Mockito.doReturn(testFile.toString()).when(reqMock).getPathInfo();
+  }
+
+  static Path writeFile(FileSystem fs, Path f) throws IOException {
+    DataOutputStream out = fs.create(f);
+    try {
+      out.writeBytes("umamahesh: " + f);
+    } finally {
+      out.close();
+    }
+    assertTrue(fs.exists(f));
+    return f;
+  }
 
 }