You are viewing a plain text version of this content. The canonical link for it is here.
Posted to common-commits@hadoop.apache.org by ra...@apache.org on 2008/09/26 22:32:38 UTC

svn commit: r699492 - in /hadoop/core/branches/branch-0.17: CHANGES.txt src/java/org/apache/hadoop/fs/FSInputChecker.java src/test/org/apache/hadoop/dfs/TestDFSShell.java src/test/org/apache/hadoop/dfs/TestFSInputChecker.java

Author: rangadi
Date: Fri Sep 26 13:32:37 2008
New Revision: 699492

URL: http://svn.apache.org/viewvc?rev=699492&view=rev
Log:
HADOOP-4277. Checksum verification was mistakenly disabled for
LocalFileSystem. (Raghu Angadi)

Modified:
    hadoop/core/branches/branch-0.17/CHANGES.txt
    hadoop/core/branches/branch-0.17/src/java/org/apache/hadoop/fs/FSInputChecker.java
    hadoop/core/branches/branch-0.17/src/test/org/apache/hadoop/dfs/TestDFSShell.java
    hadoop/core/branches/branch-0.17/src/test/org/apache/hadoop/dfs/TestFSInputChecker.java

Modified: hadoop/core/branches/branch-0.17/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.17/CHANGES.txt?rev=699492&r1=699491&r2=699492&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.17/CHANGES.txt (original)
+++ hadoop/core/branches/branch-0.17/CHANGES.txt Fri Sep 26 13:32:37 2008
@@ -1,5 +1,12 @@
 Hadoop Change Log
 
+Release 0.17.3 - Unreleased
+
+  BUG FIXES
+
+    HADOOP-4277. Checksum verification was mistakenly disabled for
+    LocalFileSystem. (Raghu Angadi)
+
 Release 0.17.2 - 2008-08-11
 
   BUG FIXES

Modified: hadoop/core/branches/branch-0.17/src/java/org/apache/hadoop/fs/FSInputChecker.java
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.17/src/java/org/apache/hadoop/fs/FSInputChecker.java?rev=699492&r1=699491&r2=699492&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.17/src/java/org/apache/hadoop/fs/FSInputChecker.java (original)
+++ hadoop/core/branches/branch-0.17/src/java/org/apache/hadoop/fs/FSInputChecker.java Fri Sep 26 13:32:37 2008
@@ -37,7 +37,7 @@
  /** The file name from which data is read */
   protected Path file;
   private Checksum sum;
-  private boolean verifyChecksum;
+  private boolean verifyChecksum = true;
   private byte[] buf;
   private byte[] checksum;
   private int pos;

Modified: hadoop/core/branches/branch-0.17/src/test/org/apache/hadoop/dfs/TestDFSShell.java
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.17/src/test/org/apache/hadoop/dfs/TestDFSShell.java?rev=699492&r1=699491&r2=699492&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.17/src/test/org/apache/hadoop/dfs/TestDFSShell.java (original)
+++ hadoop/core/branches/branch-0.17/src/test/org/apache/hadoop/dfs/TestDFSShell.java Fri Sep 26 13:32:37 2008
@@ -194,6 +194,9 @@
     final DistributedFileSystem dfs = (DistributedFileSystem)fs;
 
     try {
+      // remove left over crc files:
+      new File(TEST_ROOT_DIR, ".f1.crc").delete();
+      new File(TEST_ROOT_DIR, ".f2.crc").delete();    
       final File f1 = createLocalFile(new File(TEST_ROOT_DIR, "f1"));
       final File f2 = createLocalFile(new File(TEST_ROOT_DIR, "f2"));
   

Modified: hadoop/core/branches/branch-0.17/src/test/org/apache/hadoop/dfs/TestFSInputChecker.java
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.17/src/test/org/apache/hadoop/dfs/TestFSInputChecker.java?rev=699492&r1=699491&r2=699492&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.17/src/test/org/apache/hadoop/dfs/TestFSInputChecker.java (original)
+++ hadoop/core/branches/branch-0.17/src/test/org/apache/hadoop/dfs/TestFSInputChecker.java Fri Sep 26 13:32:37 2008
@@ -21,11 +21,14 @@
 import java.io.*;
 import java.util.Random;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.ChecksumException;
 import org.apache.hadoop.fs.ChecksumFileSystem;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.LocalFileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.IOUtils;
 
 /**
  * This class tests if FSInputChecker works correctly.
@@ -40,6 +43,7 @@
   byte[] expected = new byte[FILE_SIZE];
   byte[] actual;
   FSDataInputStream stm;
+  Random rand = new Random(seed);
 
   /* create a file */
   private void writeFile(FileSystem fileSys, Path name) throws IOException {
@@ -216,13 +220,70 @@
     cleanupFile(fileSys, file);
   }
   
+  private void testFileCorruption(LocalFileSystem fileSys) throws IOException {
+    // create a file and verify that checksum corruption results in 
+    // a checksum exception on LocalFS
+    
+    String dir = System.getProperty("test.build.data", ".");
+    Path file = new Path(dir + "/corruption-test.dat");
+    Path crcFile = new Path(dir + "/.corruption-test.dat.crc");
+    
+    writeFile(fileSys, file);
+    
+    int fileLen = (int)fileSys.getFileStatus(file).getLen();
+    
+    byte [] buf = new byte[fileLen];
+
+    InputStream in = fileSys.open(file);
+    IOUtils.readFully(in, buf, 0, buf.length);
+    in.close();
+    
+    // check .crc corruption
+    checkFileCorruption(fileSys, file, crcFile);
+    fileSys.delete(file, true);
+    
+    writeFile(fileSys, file);
+    
+    // check data corruption
+    checkFileCorruption(fileSys, file, file);
+    
+    fileSys.delete(file, true);
+  }
+  
+  private void checkFileCorruption(LocalFileSystem fileSys, Path file, 
+                                   Path fileToCorrupt) throws IOException {
+    
+    // corrupt the file 
+    RandomAccessFile out = 
+      new RandomAccessFile(new File(fileToCorrupt.toString()), "rw");
+    
+    byte[] buf = new byte[(int)fileSys.getFileStatus(file).getLen()];    
+    int corruptFileLen = (int)fileSys.getFileStatus(fileToCorrupt).getLen();
+    assertTrue(buf.length >= corruptFileLen);
+    
+    rand.nextBytes(buf);
+    out.seek(corruptFileLen/2);
+    out.write(buf, 0, corruptFileLen/4);
+    out.close();
+
+    boolean gotException = false;
+    
+    InputStream in = fileSys.open(file);
+    try {
+      IOUtils.readFully(in, buf, 0, buf.length);
+    } catch (ChecksumException e) {
+      gotException = true;
+    }
+    assertTrue(gotException);
+    in.close();    
+  }
+  
   public void testFSInputChecker() throws Exception {
     Configuration conf = new Configuration();
     conf.setLong("dfs.block.size", BLOCK_SIZE);
     conf.setInt("io.bytes.per.checksum", BYTES_PER_SUM);
     conf.set("fs.hdfs.impl",
              "org.apache.hadoop.dfs.ChecksumDistributedFileSystem");
-    Random rand = new Random(seed);
     rand.nextBytes(expected);
 
     // test DFS
@@ -242,6 +303,7 @@
     try {
       testChecker(fileSys, true);
       testChecker(fileSys, false);
+      testFileCorruption((LocalFileSystem)fileSys);
     }finally {
       fileSys.close();
     }