Posted to hdfs-commits@hadoop.apache.org by to...@apache.org on 2011/10/09 05:59:21 UTC

svn commit: r1180541 - in /hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs: ./ src/test/java/org/apache/hadoop/hdfs/ src/test/java/org/apache/hadoop/hdfs/server/namenode/

Author: todd
Date: Sun Oct  9 03:59:21 2011
New Revision: 1180541

URL: http://svn.apache.org/viewvc?rev=1180541&view=rev
Log:
HDFS-2414. Fix TestDFSRollback to avoid spurious failures. Contributed by Todd Lipcon.

Modified:
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSRollback.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSUpgrade.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/UpgradeUtilities.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/FSImageTestUtil.java

Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt?rev=1180541&r1=1180540&r2=1180541&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt Sun Oct  9 03:59:21 2011
@@ -1128,6 +1128,8 @@ Release 0.23.0 - Unreleased
     HDFS-2412. Add backwards-compatibility layer for renamed FSConstants
                class (todd)
 
+    HDFS-2414. Fix TestDFSRollback to avoid spurious failures. (todd)
+
   BREAKDOWN OF HDFS-1073 SUBTASKS
 
     HDFS-1521. Persist transaction ID on disk between NN restarts.

Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSRollback.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSRollback.java?rev=1180541&r1=1180540&r2=1180541&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSRollback.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSRollback.java Sun Oct  9 03:59:21 2011
@@ -37,6 +37,7 @@ import org.apache.hadoop.hdfs.server.com
 import org.apache.hadoop.hdfs.server.namenode.FSImageTestUtil;
 import org.apache.hadoop.util.StringUtils;
 
+import com.google.common.base.Charsets;
 import com.google.common.collect.Lists;
 
 /**
@@ -263,10 +264,14 @@ public class TestDFSRollback extends Tes
       UpgradeUtilities.createNameNodeStorageDirs(nameNodeDirs, "current");
       baseDirs = UpgradeUtilities.createNameNodeStorageDirs(nameNodeDirs, "previous");
       for (File f : baseDirs) { 
-        UpgradeUtilities.corruptFile(new File(f,"VERSION")); 
+        UpgradeUtilities.corruptFile(
+            new File(f,"VERSION"),
+            "layoutVersion".getBytes(Charsets.UTF_8),
+            "xxxxxxxxxxxxx".getBytes(Charsets.UTF_8));
       }
       startNameNodeShouldFail(StartupOption.ROLLBACK,
           "file VERSION has layoutVersion missing");
+
       UpgradeUtilities.createEmptyDirs(nameNodeDirs);
       
       log("NameNode rollback with old layout version in previous", numDirs);
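
For context: a NameNode VERSION file is a Java properties file, so overwriting the key name "layoutVersion" with same-length junk removes exactly one property and leaves every other byte of the file intact. A sketch of the before and after (the other field names are real, but the values here are illustrative):

    # before
    namespaceID=134368441
    cTime=0
    storageType=NAME_NODE
    layoutVersion=-38

    # after corruptFile(f, "layoutVersion".getBytes(UTF_8), "xxxxxxxxxxxxx".getBytes(UTF_8))
    namespaceID=134368441
    cTime=0
    storageType=NAME_NODE
    xxxxxxxxxxxxx=-38

With the layoutVersion key gone, rollback now fails with exactly the message asserted above. The old implementation flipped random bytes, which could leave VERSION parseable (so startup did not fail at all) or break it with a different error message -- the source of the spurious failures this commit fixes.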

Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSUpgrade.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSUpgrade.java?rev=1180541&r1=1180540&r2=1180541&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSUpgrade.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSUpgrade.java Sun Oct  9 03:59:21 2011
@@ -39,6 +39,7 @@ import org.junit.BeforeClass;
 import org.junit.Ignore;
 import org.junit.Test;
 
+import com.google.common.base.Charsets;
 import com.google.common.base.Joiner;
 
 import static org.junit.Assert.*;
@@ -303,7 +304,10 @@ public class TestDFSUpgrade {
       log("NameNode upgrade with corrupt version file", numDirs);
       baseDirs = UpgradeUtilities.createNameNodeStorageDirs(nameNodeDirs, "current");
       for (File f : baseDirs) { 
-        UpgradeUtilities.corruptFile(new File (f,"VERSION")); 
+        UpgradeUtilities.corruptFile(
+            new File(f,"VERSION"),
+            "layoutVersion".getBytes(Charsets.UTF_8),
+            "xxxxxxxxxxxxx".getBytes(Charsets.UTF_8));
       }
       startNameNodeShouldFail(StartupOption.UPGRADE);
       UpgradeUtilities.createEmptyDirs(nameNodeDirs);

Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/UpgradeUtilities.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/UpgradeUtilities.java?rev=1180541&r1=1180540&r2=1180541&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/UpgradeUtilities.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/UpgradeUtilities.java Sun Oct  9 03:59:21 2011
@@ -24,10 +24,8 @@ import java.io.File;
 import java.io.FileInputStream;
 import java.io.IOException;
 import java.io.OutputStream;
-import java.io.RandomAccessFile;
 import java.net.URI;
 import java.util.Arrays;
-import java.util.Random;
 import java.util.Collections;
 import java.util.zip.CRC32;
 import org.apache.hadoop.conf.Configuration;
@@ -53,6 +51,10 @@ import org.apache.hadoop.hdfs.server.dat
 import org.apache.hadoop.hdfs.server.namenode.NNStorage;
 import org.apache.hadoop.hdfs.server.protocol.NamenodeProtocols;
 
+import com.google.common.base.Preconditions;
+import com.google.common.io.Files;
+import com.google.common.primitives.Bytes;
+
 /**
  * This class defines a number of static helper methods used by the
  * DFS Upgrade unit tests.  By default, a singleton master populated storage
@@ -483,20 +485,26 @@ public class UpgradeUtilities {
    * @throws IllegalArgumentException if the given file is not a file
    * @throws IOException if an IOException occurs while reading or writing the file
    */
-  public static void corruptFile(File file) throws IOException {
+  public static void corruptFile(File file,
+      byte[] stringToCorrupt,
+      byte[] replacement) throws IOException {
+    Preconditions.checkArgument(replacement.length == stringToCorrupt.length);
     if (!file.isFile()) {
       throw new IllegalArgumentException(
-                                         "Given argument is not a file:" + file);
+          "Given argument is not a file:" + file);
     }
-    RandomAccessFile raf = new RandomAccessFile(file,"rws");
-    Random random = new Random();
-    for (long i = 0; i < raf.length(); i++) {
-      raf.seek(i);
-      if (random.nextBoolean()) {
-        raf.writeByte(random.nextInt());
-      }
+    byte[] data = Files.toByteArray(file);
+    int index = Bytes.indexOf(data, stringToCorrupt);
+    if (index == -1) {
+      throw new IOException(
+          "File " + file + " does not contain string " +
+          new String(stringToCorrupt));
+    }
+
+    for (int i = 0; i < stringToCorrupt.length; i++) {
+      data[index + i] = replacement[i];
     }
-    raf.close();
+    Files.write(data, file);
   }
   
   /**
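
A minimal self-contained sketch of the same find-and-replace technique, using the same Guava calls as the new helper (the class name, main() harness, and file path handling below are hypothetical, for illustration only):

    import java.io.File;
    import java.io.IOException;

    import com.google.common.base.Charsets;
    import com.google.common.io.Files;
    import com.google.common.primitives.Bytes;

    public class CorruptVersionDemo {
      public static void main(String[] args) throws IOException {
        File version = new File(args[0]);          // e.g. <storage-dir>/current/VERSION
        byte[] target = "layoutVersion".getBytes(Charsets.UTF_8);
        byte[] junk = "xxxxxxxxxxxxx".getBytes(Charsets.UTF_8);  // same length as target

        byte[] data = Files.toByteArray(version);  // slurp the whole file
        int at = Bytes.indexOf(data, target);      // locate the key
        if (at == -1) {
          throw new IOException("no layoutVersion key in " + version);
        }
        System.arraycopy(junk, 0, data, at, junk.length);  // overwrite in place
        Files.write(data, version);                // write back, same size
      }
    }

Because the replacement is exactly the same length as the target (enforced by the Preconditions check in the new corruptFile), the file size and every other byte are unchanged, so the corruption can only ever affect the one targeted property.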

Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/FSImageTestUtil.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/FSImageTestUtil.java?rev=1180541&r1=1180540&r2=1180541&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/FSImageTestUtil.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/FSImageTestUtil.java Sun Oct  9 03:59:21 2011
@@ -29,6 +29,7 @@ import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.Map.Entry;
 import java.util.Properties;
 import java.util.Set;
 
@@ -233,12 +234,50 @@ public abstract class FSImageTestUtil {
         // recurse
         assertParallelFilesAreIdentical(sameNameList, ignoredFileNames);
       } else {
-        assertFileContentsSame(sameNameList.toArray(new File[0]));
+        if ("VERSION".equals(sameNameList.get(0).getName())) {
+          assertPropertiesFilesSame(sameNameList.toArray(new File[0]));
+        } else {
+          assertFileContentsSame(sameNameList.toArray(new File[0]));
+        }
       }
     }  
   }
   
   /**
+   * Assert that a set of properties files all contain the same data.
+   * We cannot simply check the md5sums here, since Properties files
+   * contain timestamps -- thus, two properties files from the same
+   * saveNamespace operation may actually differ in md5sum.
+   * @param propFiles the files to compare
+   * @throws IOException if the files cannot be opened or read
+   * @throws AssertionError if the files differ
+   */
+  public static void assertPropertiesFilesSame(File[] propFiles)
+      throws IOException {
+    Set<Map.Entry<Object, Object>> prevProps = null;
+    
+    for (File f : propFiles) {
+      Properties props;
+      FileInputStream is = new FileInputStream(f);
+      try {
+        props = new Properties();
+        props.load(is);
+      } finally {
+        IOUtils.closeStream(is);
+      }
+      if (prevProps == null) {
+        prevProps = props.entrySet();
+      } else {
+        Set<Entry<Object,Object>> diff =
+          Sets.symmetricDifference(prevProps, props.entrySet());
+        if (!diff.isEmpty()) {
+          fail("Properties file " + f + " differs from " + propFiles[0]);
+        }
+      }
+    }
+  }
+
+  /**
    * Assert that all of the given paths have the exact same
    * contents 
    */