Posted to hdfs-commits@hadoop.apache.org by co...@apache.org on 2010/12/04 01:35:59 UTC

svn commit: r1042068 - in /hadoop/hdfs/branches/branch-0.22: ./ src/c++/libhdfs/ src/contrib/hdfsproxy/ src/java/ src/java/org/apache/hadoop/hdfs/server/datanode/ src/test/hdfs/ src/test/hdfs/org/apache/hadoop/hdfs/ src/webapps/datanode/ src/webapps/hd...

Author: cos
Date: Sat Dec  4 00:35:59 2010
New Revision: 1042068

URL: http://svn.apache.org/viewvc?rev=1042068&view=rev
Log:
svn merge -c 1042067 from trunk

Modified:
    hadoop/hdfs/branches/branch-0.22/   (props changed)
    hadoop/hdfs/branches/branch-0.22/CHANGES.txt
    hadoop/hdfs/branches/branch-0.22/build.xml   (props changed)
    hadoop/hdfs/branches/branch-0.22/src/c++/libhdfs/   (props changed)
    hadoop/hdfs/branches/branch-0.22/src/contrib/hdfsproxy/   (props changed)
    hadoop/hdfs/branches/branch-0.22/src/java/   (props changed)
    hadoop/hdfs/branches/branch-0.22/src/java/org/apache/hadoop/hdfs/server/datanode/ReplicaInfo.java   (props changed)
    hadoop/hdfs/branches/branch-0.22/src/test/hdfs/   (props changed)
    hadoop/hdfs/branches/branch-0.22/src/test/hdfs/org/apache/hadoop/hdfs/TestLargeBlock.java
    hadoop/hdfs/branches/branch-0.22/src/webapps/datanode/   (props changed)
    hadoop/hdfs/branches/branch-0.22/src/webapps/hdfs/   (props changed)
    hadoop/hdfs/branches/branch-0.22/src/webapps/secondary/   (props changed)

Propchange: hadoop/hdfs/branches/branch-0.22/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Sat Dec  4 00:35:59 2010
@@ -1,4 +1,4 @@
 /hadoop/core/branches/branch-0.19/hdfs:713112
 /hadoop/hdfs/branches/HDFS-265:796829-820463
 /hadoop/hdfs/branches/branch-0.21:820487
-/hadoop/hdfs/trunk:1036213,1036303,1036310,1036631,1036767,1037047,1037109,1037961,1039957,1040005,1040411,1041190,1041261
+/hadoop/hdfs/trunk:1036213,1036303,1036310,1036631,1036767,1037047,1037109,1037961,1039957,1040005,1040411,1041190,1041261,1042067

Modified: hadoop/hdfs/branches/branch-0.22/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hdfs/branches/branch-0.22/CHANGES.txt?rev=1042068&r1=1042067&r2=1042068&view=diff
==============================================================================
--- hadoop/hdfs/branches/branch-0.22/CHANGES.txt (original)
+++ hadoop/hdfs/branches/branch-0.22/CHANGES.txt Sat Dec  4 00:35:59 2010
@@ -396,8 +396,6 @@ Release 0.22.0 - Unreleased
     HDFS-1524. Image loader should make sure to read every byte in image file.
     (hairong)
 
-    HDFS-1534. TestLargeBlock is failing on trunk. (cos)
-
 Release 0.21.1 - Unreleased
 
   IMPROVEMENTS

Propchange: hadoop/hdfs/branches/branch-0.22/build.xml
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Sat Dec  4 00:35:59 2010
@@ -2,4 +2,4 @@
 /hadoop/core/trunk/build.xml:779102
 /hadoop/hdfs/branches/HDFS-265/build.xml:796829-820463
 /hadoop/hdfs/branches/branch-0.21/build.xml:820487
-/hadoop/hdfs/trunk/build.xml:1036213,1036303,1036310,1036631,1036767,1037047,1037109,1037961,1039957,1040005,1040411,1041190,1041261
+/hadoop/hdfs/trunk/build.xml:1036213,1036303,1036310,1036631,1036767,1037047,1037109,1037961,1039957,1040005,1040411,1041190,1041261,1042067

Propchange: hadoop/hdfs/branches/branch-0.22/src/c++/libhdfs/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Sat Dec  4 00:35:59 2010
@@ -1,3 +1,3 @@
 /hadoop/core/branches/branch-0.19/mapred/src/c++/libhdfs:713112
 /hadoop/core/trunk/src/c++/libhdfs:776175-784663
-/hadoop/hdfs/trunk/src/c++/libhdfs:1036213,1036303,1036310,1036631,1036767,1037047,1037109,1037961,1039957,1040005,1040411,1041190,1041261
+/hadoop/hdfs/trunk/src/c++/libhdfs:1036213,1036303,1036310,1036631,1036767,1037047,1037109,1037961,1039957,1040005,1040411,1041190,1041261,1042067

Propchange: hadoop/hdfs/branches/branch-0.22/src/contrib/hdfsproxy/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Sat Dec  4 00:35:59 2010
@@ -2,4 +2,4 @@
 /hadoop/core/trunk/src/contrib/hdfsproxy:776175-784663
 /hadoop/hdfs/branches/HDFS-265/src/contrib/hdfsproxy:796829-820463
 /hadoop/hdfs/branches/branch-0.21/src/contrib/hdfsproxy:820487
-/hadoop/hdfs/trunk/src/contrib/hdfsproxy:1036213,1036303,1036310,1036631,1036767,1037047,1037109,1037961,1039957,1040005,1040411,1041190,1041261
+/hadoop/hdfs/trunk/src/contrib/hdfsproxy:1036213,1036303,1036310,1036631,1036767,1037047,1037109,1037961,1039957,1040005,1040411,1041190,1041261,1042067

Propchange: hadoop/hdfs/branches/branch-0.22/src/java/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Sat Dec  4 00:35:59 2010
@@ -2,4 +2,4 @@
 /hadoop/core/trunk/src/hdfs:776175-785643,785929-786278
 /hadoop/hdfs/branches/HDFS-265/src/java:796829-820463
 /hadoop/hdfs/branches/branch-0.21/src/java:820487
-/hadoop/hdfs/trunk/src/java:1036213,1036303,1036310,1036631,1036767,1037047,1037109,1037961,1039957,1040005,1040411,1041190,1041261
+/hadoop/hdfs/trunk/src/java:1036213,1036303,1036310,1036631,1036767,1037047,1037109,1037961,1039957,1040005,1040411,1041190,1041261,1042067

Propchange: hadoop/hdfs/branches/branch-0.22/src/java/org/apache/hadoop/hdfs/server/datanode/ReplicaInfo.java
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Sat Dec  4 00:35:59 2010
@@ -4,4 +4,4 @@
 /hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/datanode/ReplicaInfo.java:776175-785643,785929-786278
 /hadoop/hdfs/branches/HDFS-265/src/java/org/apache/hadoop/hdfs/server/datanode/ReplicaInfo.java:796829-820463
 /hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/datanode/ReplicaInfo.java:820487
-/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/ReplicaInfo.java:1036213,1036303,1036310,1036631,1036767,1037047,1037109,1037961,1039957,1040005,1040411,1041190,1041261
+/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/ReplicaInfo.java:1036213,1036303,1036310,1036631,1036767,1037047,1037109,1037961,1039957,1040005,1040411,1041190,1041261,1042067

Propchange: hadoop/hdfs/branches/branch-0.22/src/test/hdfs/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Sat Dec  4 00:35:59 2010
@@ -2,4 +2,4 @@
 /hadoop/core/trunk/src/test/hdfs:776175-785643
 /hadoop/hdfs/branches/HDFS-265/src/test/hdfs:796829-820463
 /hadoop/hdfs/branches/branch-0.21/src/test/hdfs:820487
-/hadoop/hdfs/trunk/src/test/hdfs:1036213,1036303,1036310,1036631,1036767,1037047,1037109,1037961,1039957,1040005,1040411,1041190,1041261
+/hadoop/hdfs/trunk/src/test/hdfs:1036213,1036303,1036310,1036631,1036767,1037047,1037109,1037961,1039957,1040005,1040411,1041190,1041261,1042067

Modified: hadoop/hdfs/branches/branch-0.22/src/test/hdfs/org/apache/hadoop/hdfs/TestLargeBlock.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/branches/branch-0.22/src/test/hdfs/org/apache/hadoop/hdfs/TestLargeBlock.java?rev=1042068&r1=1042067&r2=1042068&view=diff
==============================================================================
--- hadoop/hdfs/branches/branch-0.22/src/test/hdfs/org/apache/hadoop/hdfs/TestLargeBlock.java (original)
+++ hadoop/hdfs/branches/branch-0.22/src/test/hdfs/org/apache/hadoop/hdfs/TestLargeBlock.java Sat Dec  4 00:35:59 2010
@@ -17,70 +17,72 @@
  */
 package org.apache.hadoop.hdfs;
 
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileReader;
 import java.io.IOException;
+import java.net.InetSocketAddress;
+import java.util.Random;
 import java.util.Arrays;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.commons.logging.impl.Log4JLogger;
-import org.apache.log4j.Level;
-
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.BlockLocation;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hdfs.protocol.Block;
+import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
+import org.apache.hadoop.hdfs.protocol.FSConstants;
+import org.apache.hadoop.hdfs.protocol.LocatedBlock;
+import org.apache.hadoop.hdfs.protocol.LocatedBlocks;
 import org.apache.hadoop.hdfs.server.datanode.DataNode;
+import org.apache.hadoop.hdfs.server.datanode.FSDataset;
 import org.apache.hadoop.hdfs.server.datanode.SimulatedFSDataset;
 import org.apache.hadoop.hdfs.server.namenode.FSNamesystem;
 import org.apache.hadoop.hdfs.server.namenode.LeaseManager;
+import org.apache.hadoop.io.IOUtils;
+import org.apache.log4j.Level;
 
 import org.junit.Test;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assume.assumeTrue;
 
 /**
  * This class tests that blocks can be larger than 2GB
  */
-public class TestLargeBlock {
-/**
+public class TestLargeBlock extends junit.framework.TestCase {
+  static final String DIR = "/" + TestLargeBlock.class.getSimpleName() + "/";
+
   {
-    ((Log4JLogger)DataNode.LOG).getLogger().setLevel(Level.ALL);
-    ((Log4JLogger)LeaseManager.LOG).getLogger().setLevel(Level.ALL);
-    ((Log4JLogger)FSNamesystem.LOG).getLogger().setLevel(Level.ALL);
-    ((Log4JLogger)DFSClient.LOG).getLogger().setLevel(Level.ALL);
-    ((Log4JLogger)TestLargeBlock.LOG).getLogger().setLevel(Level.ALL);
+    // ((Log4JLogger)DataNode.LOG).getLogger().setLevel(Level.ALL);
+    // ((Log4JLogger)LeaseManager.LOG).getLogger().setLevel(Level.ALL);
+    // ((Log4JLogger)FSNamesystem.LOG).getLogger().setLevel(Level.ALL);
+    // ((Log4JLogger)DFSClient.LOG).getLogger().setLevel(Level.ALL);
   }
- */
-  private static final Log LOG = LogFactory.getLog(TestLargeBlock.class);
 
-  // should we verify the data read back from the file? (slow)
-  static final boolean verifyData = true;
+
+  static final boolean verifyData = true; // should we verify the data read back from the file? (slow)
   static final byte[] pattern = { 'D', 'E', 'A', 'D', 'B', 'E', 'E', 'F'};
   static final boolean simulatedStorage = false;
-  private static final String ALLOWED_VM = "64";
 
   // creates a file 
-  static FSDataOutputStream createFile(FileSystem fileSys, Path name, int repl,
-                                       final long blockSize)
+  static FSDataOutputStream createFile(FileSystem fileSys, Path name, int repl, final long blockSize)
     throws IOException {
     FSDataOutputStream stm = fileSys.create(name, true,
-        fileSys.getConf().getInt("io.file.buffer.size", 4096),
-        (short)repl, blockSize);
-    LOG.info("createFile: Created " + name + " with " + repl + " replica.");
+                                            fileSys.getConf().getInt("io.file.buffer.size", 4096),
+                                            (short)repl, blockSize);
+    System.out.println("createFile: Created " + name + " with " + repl + " replica.");
     return stm;
   }
 
+
   /**
    * Writes pattern to file
-   * @param stm FSDataOutputStream to write the file
-   * @param fileSize size of the file to be written
-   * @throws IOException in case of errors
    */
-  static void writeFile(FSDataOutputStream stm, final long fileSize)
-      throws IOException {
-    // write in chunks of 64 MB
-    final int writeSize = pattern.length * 8 * 1024 * 1024;
+  static void writeFile(FSDataOutputStream stm, final long fileSize) throws IOException {
+    final int writeSize = pattern.length * 8 * 1024 * 1024; // write in chunks of 64 MB
+    final int writeCount = (int) ((fileSize / ((long) writeSize)) + ((fileSize % ((long) writeSize) == 0L) ? 0L : 1L));
 
     if (writeSize > Integer.MAX_VALUE) {
       throw new IOException("A single write is too large " + writeSize);
@@ -94,26 +96,24 @@ public class TestLargeBlock {
       b[j] = pattern[j % pattern.length];
     }
 
+    int i = 0;
+
     while (bytesToWrite > 0) {
-      // how many bytes we are writing in this iteration
-      int thiswrite = (int) Math.min(writeSize, bytesToWrite);
+      int thiswrite = (int) Math.min(writeSize, bytesToWrite); // how many bytes we are writing in this iteration
 
       stm.write(b, 0, thiswrite);
+      // System.out.println("Wrote[" + i + "/" + writeCount + "] " + thiswrite + " bytes.");
       bytesToWrite -= thiswrite;
+      i++;
     }
   }
 
   /**
    * Reads from file and makes sure that it matches the pattern
-   * @param fs a reference to FileSystem
-   * @param name Path of a file
-   * @param fileSize size of the file
-   * @throws IOException in case of errors
    */
-  static void checkFullFile(FileSystem fs, Path name, final long fileSize)
-      throws IOException {
-    // read in chunks of 128 MB
-    final int readSize = pattern.length * 16 * 1024 * 1024;
+  static void checkFullFile(FileSystem fs, Path name, final long fileSize) throws IOException {
+    final int readSize = pattern.length * 16 * 1024 * 1024; // read in chunks of 128 MB
+    final int readCount = (int) ((fileSize / ((long) readSize)) + ((fileSize % ((long) readSize) == 0L) ? 0L : 1L));
 
     if (readSize > Integer.MAX_VALUE) {
       throw new IOException("A single read is too large " + readSize);
@@ -131,51 +131,48 @@ public class TestLargeBlock {
       }
     }
 
+
     FSDataInputStream stm = fs.open(name);
 
+    int i = 0;
+
     while (bytesToRead > 0) {
-      // how many bytes we are reading in this iteration
-      int thisread = (int) Math.min(readSize, bytesToRead);
+      int thisread = (int) Math.min(readSize, bytesToRead); // how many bytes we are reading in this iteration
 
       stm.readFully(b, 0, thisread); 
       
       if (verifyData) {
         // verify data read
+        
         if (thisread == readSize) {
-          assertTrue("file is corrupted at or after byte " +
-              (fileSize - bytesToRead), Arrays.equals(b, compb));
+          assertTrue("file corrupted at or after byte " + (fileSize - bytesToRead), Arrays.equals(b, compb));
         } else {
           // b was only partially filled by last read
           for (int k = 0; k < thisread; k++) {
-            assertTrue("file is corrupted at or after byte " +
-                (fileSize - bytesToRead), b[k] == compb[k]);
+            assertTrue("file corrupted at or after byte " + (fileSize - bytesToRead), b[k] == compb[k]);
           }
         }
       }
-      LOG.debug("Before update: to read: " + bytesToRead +
-          "; read already: "+ thisread);
+
+      // System.out.println("Read[" + i + "/" + readCount + "] " + thisread + " bytes.");
+
       bytesToRead -= thisread;
-      LOG.debug("After  update: to read: " + bytesToRead +
-          "; read already: " + thisread);
+      i++;
     }
     stm.close();
   }
  
   /**
    * Test for block size of 2GB + 512B
-   * @throws IOException in case of errors
    */
   @Test
   public void testLargeBlockSize() throws IOException {
-    assumeTrue(ALLOWED_VM.equals(System.getProperty("sun.arch.data.model")));
     final long blockSize = 2L * 1024L * 1024L * 1024L + 512L; // 2GB + 512B
     runTest(blockSize);
   }
   
   /**
    * Test that we can write to and read from large blocks
-   * @param blockSize size of the block
-   * @throws IOException in case of errors
    */
   public void runTest(final long blockSize) throws IOException {
 
@@ -191,12 +188,11 @@ public class TestLargeBlock {
     try {
 
       // create a new file in test data directory
-      Path file1 = new Path(System.getProperty("test.build.data") + "/" +
-          Long.toString(blockSize) + ".dat");
+      Path file1 = new Path(System.getProperty("test.build.data") + "/" + Long.toString(blockSize) + ".dat");
       FSDataOutputStream stm = createFile(fs, file1, 1, blockSize);
-      LOG.info("File " + file1 + " created with file size " +
-          fileSize +
-          " blocksize " + blockSize);
+      System.out.println("File " + file1 + " created with file size " +
+                         fileSize +
+                         " blocksize " + blockSize);
 
       // verify that file exists in FS namespace
       assertTrue(file1 + " should be a file", 
@@ -204,11 +200,11 @@ public class TestLargeBlock {
 
       // write to file
       writeFile(stm, fileSize);
-      LOG.info("File " + file1 + " written to.");
+      System.out.println("File " + file1 + " written to.");
 
       // close file
       stm.close();
-      LOG.info("File " + file1 + " closed.");
+      System.out.println("File " + file1 + " closed.");
 
       // Make sure a client can read it
       checkFullFile(fs, file1, fileSize);
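
For context (not part of the patch above): the reverted TestLargeBlock writes a repeating "DEADBEEF" byte pattern in 64 MB chunks to a file whose block size is 2 GB + 512 B, then reads it back in 128 MB chunks, comparing full chunks with Arrays.equals and the short tail chunk byte by byte. Below is a minimal standalone sketch of that same chunked write/verify idiom against a plain local java.io file; the class name ChunkedPatternCheck and the scaled-down sizes are illustrative assumptions, not values taken from the commit.

import java.io.DataInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.Arrays;

public class ChunkedPatternCheck {
  // Same repeating pattern the test uses.
  static final byte[] PATTERN = { 'D', 'E', 'A', 'D', 'B', 'E', 'E', 'F' };

  public static void main(String[] args) throws IOException {
    // Scaled-down stand-ins for the test's sizes (the real test uses 64 MB
    // writes, 128 MB reads and a 2 GB + 512 B block); these values are
    // illustrative assumptions chosen so the sketch runs in milliseconds.
    final int chunkSize = PATTERN.length * 1024;   // chunk is a multiple of the pattern length
    final long fileSize = 10L * chunkSize + 512L;  // deliberately not a multiple of chunkSize

    File f = File.createTempFile("chunked-pattern", ".dat");
    f.deleteOnExit();

    // Fill one pattern-aligned buffer, then write it until fileSize bytes are out.
    byte[] chunk = new byte[chunkSize];
    for (int j = 0; j < chunk.length; j++) {
      chunk[j] = PATTERN[j % PATTERN.length];
    }
    FileOutputStream out = new FileOutputStream(f);
    long bytesToWrite = fileSize;
    while (bytesToWrite > 0) {
      int thisWrite = (int) Math.min(chunkSize, bytesToWrite);
      out.write(chunk, 0, thisWrite);
      bytesToWrite -= thisWrite;
    }
    out.close();

    // Read back and verify: full chunks are compared with Arrays.equals,
    // the short tail chunk byte by byte, mirroring checkFullFile above.
    byte[] got = new byte[chunkSize];
    DataInputStream in = new DataInputStream(new FileInputStream(f));
    long bytesToRead = fileSize;
    while (bytesToRead > 0) {
      int thisRead = (int) Math.min(chunkSize, bytesToRead);
      in.readFully(got, 0, thisRead);
      if (thisRead == chunkSize) {
        if (!Arrays.equals(got, chunk)) {
          throw new IOException("corrupted at or after byte " + (fileSize - bytesToRead));
        }
      } else {
        for (int k = 0; k < thisRead; k++) {
          if (got[k] != chunk[k]) {
            throw new IOException("corrupted at or after byte " + (fileSize - bytesToRead));
          }
        }
      }
      bytesToRead -= thisRead;
    }
    in.close();
    System.out.println("wrote and verified " + fileSize + " bytes in chunks of " + chunkSize + " bytes");
  }
}

Because the chunk size is a whole multiple of the pattern length, the pattern lines up identically in every full chunk, which is what makes the single Arrays.equals comparison per chunk valid.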

Propchange: hadoop/hdfs/branches/branch-0.22/src/webapps/datanode/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Sat Dec  4 00:35:59 2010
@@ -2,4 +2,4 @@
 /hadoop/core/trunk/src/webapps/datanode:776175-784663
 /hadoop/hdfs/branches/HDFS-265/src/webapps/datanode:796829-820463
 /hadoop/hdfs/branches/branch-0.21/src/webapps/datanode:820487
-/hadoop/hdfs/trunk/src/webapps/datanode:1036213,1036303,1036310,1036631,1036767,1037047,1037109,1037961,1039957,1040005,1040411,1041190,1041261
+/hadoop/hdfs/trunk/src/webapps/datanode:1036213,1036303,1036310,1036631,1036767,1037047,1037109,1037961,1039957,1040005,1040411,1041190,1041261,1042067

Propchange: hadoop/hdfs/branches/branch-0.22/src/webapps/hdfs/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Sat Dec  4 00:35:59 2010
@@ -2,4 +2,4 @@
 /hadoop/core/trunk/src/webapps/hdfs:776175-784663
 /hadoop/hdfs/branches/HDFS-265/src/webapps/hdfs:796829-820463
 /hadoop/hdfs/branches/branch-0.21/src/webapps/hdfs:820487
-/hadoop/hdfs/trunk/src/webapps/hdfs:1036213,1036303,1036310,1036631,1036767,1037047,1037109,1037961,1039957,1040005,1040411,1041190,1041261
+/hadoop/hdfs/trunk/src/webapps/hdfs:1036213,1036303,1036310,1036631,1036767,1037047,1037109,1037961,1039957,1040005,1040411,1041190,1041261,1042067

Propchange: hadoop/hdfs/branches/branch-0.22/src/webapps/secondary/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Sat Dec  4 00:35:59 2010
@@ -2,4 +2,4 @@
 /hadoop/core/trunk/src/webapps/secondary:776175-784663
 /hadoop/hdfs/branches/HDFS-265/src/webapps/secondary:796829-820463
 /hadoop/hdfs/branches/branch-0.21/src/webapps/secondary:820487
-/hadoop/hdfs/trunk/src/webapps/secondary:1036213,1036303,1036310,1036631,1036767,1037047,1037109,1037961,1039957,1040005,1040411,1041190,1041261
+/hadoop/hdfs/trunk/src/webapps/secondary:1036213,1036303,1036310,1036631,1036767,1037047,1037109,1037961,1039957,1040005,1040411,1041190,1041261,1042067