Posted to common-commits@hadoop.apache.org by to...@apache.org on 2012/02/10 02:49:23 UTC

svn commit: r1242635 - in /hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common: ./ src/main/bin/ src/main/docs/ src/main/java/ src/main/java/org/apache/hadoop/fs/ src/main/java/org/apache/hadoop/fs/shell/ src/main/java/org/apache/hadoo...

Author: todd
Date: Fri Feb 10 01:49:08 2012
New Revision: 1242635

URL: http://svn.apache.org/viewvc?rev=1242635&view=rev
Log:
Merge trunk into HA branch

Added:
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellCopy.java
      - copied unchanged from r1242632, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellCopy.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFileSystemDelegation.java
      - copied unchanged from r1242632, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFileSystemDelegation.java
Modified:
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/CHANGES.txt   (contents, props changed)
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/bin/hadoop-config.sh
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/bin/hadoop-daemon.sh
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/docs/   (props changed)
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/   (props changed)
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFileSystem.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFileSystem.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalFileSystem.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CommandWithDestination.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CopyCommands.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/PathData.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ChRootedFileSystem.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/jmx/JMXJsonServlet.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Credentials.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/core/   (props changed)
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemTestHelper.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestChecksumFileSystem.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestChRootedFileSystem.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestCredentials.java

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1242635&r1=1242634&r2=1242635&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/CHANGES.txt (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/CHANGES.txt Fri Feb 10 01:49:08 2012
@@ -13,6 +13,16 @@ Trunk (unreleased changes)
     (suresh)
 
   IMPROVEMENTS
+    HADOOP-8048. Allow merging of Credentials (Daryn Sharp via tgraves)
+ 
+    HADOOP-8032. mvn site:stage-deploy should be able to use the scp protocol
+    to stage documents (Ravi Prakash via tgraves)
+
+    HADOOP-8017. Configure hadoop-main pom to get rid of M2E plugin execution
+    not covered (Eric Charles via bobby)
+
+    HADOOP-8015. ChRootFileSystem should extend FilterFileSystem 
+    (Daryn Sharp via bobby)
 
     HADOOP-7595. Upgrade dependency to Avro 1.5.3. (Alejandro Abdelnur via atm)
 
@@ -79,12 +89,19 @@ Trunk (unreleased changes)
     HADOOP-7988. Upper case in hostname part of the principals doesn't work with 
     kerberos. (jitendra)
 
-  BUGS
-    HADOOP-7998. CheckFileSystem does not correctly honor setVerifyChecksum
-                 (Daryn Sharp via bobby)
+  BUG FIXES
 
-    HADOOP-7851. Configuration.getClasses() never returns the default value. 
-                 (Uma Maheswara Rao G via amarrk)
+    HADOOP-8018. Hudson auto test for HDFS has started throwing javadoc
+    warnings. (Jon Eagles via bobby)
+
+    HADOOP-8001. ChecksumFileSystem's rename doesn't correctly handle checksum
+    files. (Daryn Sharp via bobby)
+
+    HADOOP-8006. TestFSInputChecker is failing in trunk.
+    (Daryn Sharp via bobby)
+
+    HADOOP-7998. CheckFileSystem does not correctly honor setVerifyChecksum
+    (Daryn Sharp via bobby)
 
     HADOOP-7606. Upgrade Jackson to version 1.7.1 to match the version required
     by Jersey (Alejandro Abdelnur via atm)
@@ -137,7 +154,27 @@ Trunk (unreleased changes)
 
     HADOOP-7761. Improve the performance of raw comparisons. (todd)
 
-Release 0.23.1 - Unreleased
+Release 0.23.2 - UNRELEASED 
+
+  NEW FEATURES
+  
+  IMPROVEMENTS
+
+    HADOOP-7923. Automate the updating of version numbers in the doc system.
+    (szetszwo)
+
+  OPTIMIZATIONS
+
+  BUG FIXES
+
+    HADOOP-8042. When copying a file out of HDFS, modifying it, and uploading
+    it back into HDFS, the put fails due to a CRC mismatch.
+    (Daryn Sharp via bobby)
+
+    HADOOP-8035. Hadoop Maven site is inefficient and runs phases redundantly.
+    (abayer via tucu)
+
+Release 0.23.1 - 2012-02-08 
 
   INCOMPATIBLE CHANGES
 
@@ -221,6 +258,14 @@ Release 0.23.1 - Unreleased
     HADOOP-8002. SecurityUtil acquired token message should be a debug rather than info.
     (Arpit Gupta via mahadev)
 
+    HADOOP-8009. Create hadoop-client and hadoop-minicluster artifacts for downstream 
+    projects. (tucu)
+
+    HADOOP-7470. Move up to Jackson 1.8.8.  (Enis Soztutar via szetszwo)
+
+    HADOOP-8027. Visiting /jmx on the daemon web interfaces may print
+    unnecessary error in logs. (atm)
+
   OPTIMIZATIONS
 
   BUG FIXES
@@ -228,6 +273,14 @@ Release 0.23.1 - Unreleased
    HADOOP-7811. TestUserGroupInformation#testGetServerSideGroups test fails in chroot.
    (Jonathan Eagles via mahadev)
 
+   HADOOP-7813. Fix test-patch to use proper numerical comparison when checking
+   javadoc and findbugs warning counts. (Jonathan Eagles via tlipcon)
+
+   HADOOP-7841. Run tests with non-secure random. (tlipcon)
+
+   HADOOP-7851. Configuration.getClasses() never returns the default value.
+   (Uma Maheswara Rao G via amarrk)
+
    HADOOP-7787. Make source tarball use conventional name.
    (Bruno Mahé via tomwhite)
 
@@ -311,6 +364,15 @@ Release 0.23.1 - Unreleased
    HADOOP-8000. fetchdt command not available in bin/hadoop.
    (Arpit Gupta via mahadev)
 
+   HADOOP-7999. "hadoop archive" fails with ClassNotFoundException.
+   (Jason Lowe via mahadev)
+
+   HADOOP-8012. hadoop-daemon.sh and yarn-daemon.sh are trying to mkdir
+   and chown log/pid dirs which can fail. (Roman Shaposhnik via eli)
+
+   HADOOP-8013. ViewFileSystem does not honor setVerifyChecksum
+   (Daryn Sharp via bobby)
+
 Release 0.23.0 - 2011-11-01 
 
   INCOMPATIBLE CHANGES

Propchange: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/CHANGES.txt
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Fri Feb 10 01:49:08 2012
@@ -1,5 +1,5 @@
 /hadoop/common/branches/yahoo-merge/CHANGES.txt:1079157,1079163-1079164,1079167
-/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt:1161333-1237154
+/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt:1161333-1242632
 /hadoop/core/branches/branch-0.18/CHANGES.txt:727226
 /hadoop/core/branches/branch-0.19/CHANGES.txt:713112
 /hadoop/core/trunk/CHANGES.txt:776175-785643,785929-786278

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/bin/hadoop-config.sh
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/bin/hadoop-config.sh?rev=1242635&r1=1242634&r2=1242635&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/bin/hadoop-config.sh (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/bin/hadoop-config.sh Fri Feb 10 01:49:08 2012
@@ -20,6 +20,16 @@
 # Resolve links ($0 may be a softlink) and convert a relative path
 # to an absolute path.  NB: The -P option requires bash built-ins
 # or POSIX:2001 compliant cd and pwd.
+
+#   HADOOP_CLASSPATH Extra Java CLASSPATH entries.
+#
+#   HADOOP_USER_CLASSPATH_FIRST      When defined, the HADOOP_CLASSPATH is 
+#                                    added in the beginning of the global
+#                                    classpath. Can be defined, for example,
+#                                    by doing 
+#                                    export HADOOP_USER_CLASSPATH_FIRST=true
+#
+
 this="${BASH_SOURCE-$0}"
 common_bin=$(cd -P -- "$(dirname -- "$this")" && pwd -P)
 script="$(basename -- "$this")"
@@ -153,6 +163,10 @@ fi
 # CLASSPATH initially contains $HADOOP_CONF_DIR
 CLASSPATH="${HADOOP_CONF_DIR}"
 
+if [ "$HADOOP_USER_CLASSPATH_FIRST" != "" ] && [ "$HADOOP_CLASSPATH" != "" ] ; then
+  CLASSPATH=${CLASSPATH}:${HADOOP_CLASSPATH}
+fi
+
 # so that filenames w/ spaces are handled correctly in loops below
 IFS=
 
@@ -174,7 +188,7 @@ fi
 CLASSPATH=${CLASSPATH}:$HADOOP_COMMON_HOME/$HADOOP_COMMON_DIR'/*'
 
 # add user-specified CLASSPATH last
-if [ "$HADOOP_CLASSPATH" != "" ]; then
+if [ "$HADOOP_USER_CLASSPATH_FIRST" = "" ] && [ "$HADOOP_CLASSPATH" != "" ]; then
   CLASSPATH=${CLASSPATH}:${HADOOP_CLASSPATH}
 fi
 
@@ -214,6 +228,9 @@ if [ -d "${HADOOP_PREFIX}/build/native" 
   fi
 fi
 
+# setup a default TOOL_PATH
+TOOL_PATH="${TOOL_PATH:-$HADOOP_PREFIX/share/hadoop/tools/lib/*}"
+
 # cygwin path translation
 if $cygwin; then
   JAVA_LIBRARY_PATH=`cygpath -p "$JAVA_LIBRARY_PATH"`
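
A note on the hadoop-config.sh change above: with HADOOP_USER_CLASSPATH_FIRST set (e.g. export HADOOP_USER_CLASSPATH_FIRST=true), HADOOP_CLASSPATH is prepended to the generated classpath instead of appended, so user jars can shadow the bundled ones. A minimal probe to verify which copy of a class actually wins, run via `hadoop ClasspathProbe some.Class` (the class and jar names here are hypothetical, not part of this commit):

    import java.net.URL;
    import java.security.CodeSource;

    public class ClasspathProbe {
      public static void main(String[] args) throws Exception {
        // Prints the location a class was loaded from, revealing whether the
        // user's jar or Hadoop's bundled jar came first on the classpath.
        Class<?> c = Class.forName(args[0]);
        CodeSource cs = c.getProtectionDomain().getCodeSource();
        System.out.println(c.getName() + " loaded from "
            + (cs == null ? "<bootstrap>" : cs.getLocation()));
      }
    }

Running it once with and once without HADOOP_USER_CLASSPATH_FIRST shows the precedence flip.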

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/bin/hadoop-daemon.sh
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/bin/hadoop-daemon.sh?rev=1242635&r1=1242634&r2=1242635&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/bin/hadoop-daemon.sh (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/bin/hadoop-daemon.sh Fri Feb 10 01:49:08 2012
@@ -95,8 +95,11 @@ fi
 if [ "$HADOOP_LOG_DIR" = "" ]; then
   export HADOOP_LOG_DIR="$HADOOP_PREFIX/logs"
 fi
-mkdir -p "$HADOOP_LOG_DIR"
-chown $HADOOP_IDENT_STRING $HADOOP_LOG_DIR
+
+if [ ! -w "$HADOOP_LOG_DIR" ] ; then
+  mkdir -p "$HADOOP_LOG_DIR"
+  chown $HADOOP_IDENT_STRING $HADOOP_LOG_DIR
+fi
 
 if [ "$HADOOP_PID_DIR" = "" ]; then
   HADOOP_PID_DIR=/tmp
@@ -118,7 +121,7 @@ case $startStop in
 
   (start)
 
-    mkdir -p "$HADOOP_PID_DIR"
+    [ -w "$HADOOP_PID_DIR" ] ||  mkdir -p "$HADOOP_PID_DIR"
 
     if [ -f $pid ]; then
       if kill -0 `cat $pid` > /dev/null 2>&1; then

Propchange: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/docs/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Fri Feb 10 01:49:08 2012
@@ -1,2 +1,2 @@
-/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/docs:1152502-1237154
+/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/docs:1152502-1242632
 /hadoop/core/branches/branch-0.19/src/docs:713112

Propchange: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Fri Feb 10 01:49:08 2012
@@ -1,3 +1,3 @@
-/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java:1152502-1237154
+/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java:1152502-1242632
 /hadoop/core/branches/branch-0.19/core/src/java:713112
 /hadoop/core/trunk/src/core:776175-785643,785929-786278

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFileSystem.java?rev=1242635&r1=1242634&r2=1242635&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFileSystem.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFileSystem.java Fri Feb 10 01:49:08 2012
@@ -43,6 +43,7 @@ public abstract class ChecksumFileSystem
   private static final byte[] CHECKSUM_VERSION = new byte[] {'c', 'r', 'c', 0};
   private int bytesPerChecksum = 512;
   private boolean verifyChecksum = true;
+  private boolean writeChecksum = true;
 
   public static double getApproxChkSumLength(long size) {
     return ChecksumFSOutputSummer.CHKSUM_AS_FRACTION * size;
@@ -67,6 +68,11 @@ public abstract class ChecksumFileSystem
     this.verifyChecksum = verifyChecksum;
   }
 
+  @Override
+  public void setWriteChecksum(boolean writeChecksum) {
+    this.writeChecksum = writeChecksum;
+  }
+  
   /** get the raw file system */
   public FileSystem getRawFileSystem() {
     return fs;
@@ -119,7 +125,6 @@ public abstract class ChecksumFileSystem
     private static final int HEADER_LENGTH = 8;
     
     private int bytesPerSum = 1;
-    private long fileLen = -1L;
     
     public ChecksumFSInputChecker(ChecksumFileSystem fs, Path file)
       throws IOException {
@@ -244,6 +249,24 @@ public abstract class ChecksumFileSystem
       }
       return nread;
     }
+  }
+  
+  private static class FSDataBoundedInputStream extends FSDataInputStream {
+    private FileSystem fs;
+    private Path file;
+    private long fileLen = -1L;
+
+    FSDataBoundedInputStream(FileSystem fs, Path file, InputStream in)
+        throws IOException {
+      super(in);
+      this.fs = fs;
+      this.file = file;
+    }
+    
+    @Override
+    public boolean markSupported() {
+      return false;
+    }
     
     /* Return the file length */
     private long getFileLength() throws IOException {
@@ -304,9 +327,16 @@ public abstract class ChecksumFileSystem
    */
   @Override
   public FSDataInputStream open(Path f, int bufferSize) throws IOException {
-    return verifyChecksum
-      ? new FSDataInputStream(new ChecksumFSInputChecker(this, f, bufferSize))
-      : getRawFileSystem().open(f, bufferSize);
+    FileSystem fs;
+    InputStream in;
+    if (verifyChecksum) {
+      fs = this;
+      in = new ChecksumFSInputChecker(this, f, bufferSize);
+    } else {
+      fs = getRawFileSystem();
+      in = fs.open(f, bufferSize);
+    }
+    return new FSDataBoundedInputStream(fs, f, in);
   }
 
   /** {@inheritDoc} */
@@ -404,9 +434,20 @@ public abstract class ChecksumFileSystem
         throw new IOException("Mkdirs failed to create " + parent);
       }
     }
-    final FSDataOutputStream out = new FSDataOutputStream(
-        new ChecksumFSOutputSummer(this, f, overwrite, bufferSize, replication,
-            blockSize, progress), null);
+    final FSDataOutputStream out;
+    if (writeChecksum) {
+      out = new FSDataOutputStream(
+          new ChecksumFSOutputSummer(this, f, overwrite, bufferSize, replication,
+              blockSize, progress), null);
+    } else {
+      out = fs.create(f, permission, overwrite, bufferSize, replication,
+          blockSize, progress);
+      // remove the checksum file since we aren't writing one
+      Path checkFile = getChecksumFile(f);
+      if (fs.exists(checkFile)) {
+        fs.delete(checkFile, true);
+      }
+    }
     if (permission != null) {
       setPermission(f, permission);
     }
@@ -450,18 +491,21 @@ public abstract class ChecksumFileSystem
     if (fs.isDirectory(src)) {
       return fs.rename(src, dst);
     } else {
+      if (fs.isDirectory(dst)) {
+        dst = new Path(dst, src.getName());
+      }
 
       boolean value = fs.rename(src, dst);
       if (!value)
         return false;
 
-      Path checkFile = getChecksumFile(src);
-      if (fs.exists(checkFile)) { //try to rename checksum
-        if (fs.isDirectory(dst)) {
-          value = fs.rename(checkFile, dst);
-        } else {
-          value = fs.rename(checkFile, getChecksumFile(dst));
-        }
+      Path srcCheckFile = getChecksumFile(src);
+      Path dstCheckFile = getChecksumFile(dst);
+      if (fs.exists(srcCheckFile)) { //try to rename checksum
+        value = fs.rename(srcCheckFile, dstCheckFile);
+      } else if (fs.exists(dstCheckFile)) {
+        // no src checksum, so remove dst checksum
+        value = fs.delete(dstCheckFile, true); 
       }
 
       return value;
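
Taken together, the ChecksumFileSystem changes above mean CRC generation can now be suppressed per instance, create() removes any stale sidecar, and rename() keeps the sidecar consistent with the data file. A minimal sketch of the new write path against the local filesystem (the path is illustrative):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FSDataOutputStream;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.LocalFileSystem;
    import org.apache.hadoop.fs.Path;

    public class NoCrcWrite {
      public static void main(String[] args) throws Exception {
        LocalFileSystem fs = FileSystem.getLocal(new Configuration());
        fs.setWriteChecksum(false);               // new API in this merge
        Path p = new Path("/tmp/nocrc.txt");
        FSDataOutputStream out = fs.create(p);
        out.writeBytes("hello\n");
        out.close();
        // No ".nocrc.txt.crc" sidecar is written, and a stale one left by an
        // earlier checksummed write of the same path is deleted by create().
        System.out.println(fs.exists(fs.getChecksumFile(p)));  // prints false
      }
    }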

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java?rev=1242635&r1=1242634&r2=1242635&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java Fri Feb 10 01:49:08 2012
@@ -1937,6 +1937,15 @@ public abstract class FileSystem extends
   }
 
   /**
+   * Set the write checksum flag. This is only applicable if the
+   * corresponding FileSystem supports checksums; the default implementation is a no-op.
+   * @param writeChecksum whether checksum files should be written
+   */
+  public void setWriteChecksum(boolean writeChecksum) {
+    //doesn't do anything
+  }
+
+  /**
    * Return a list of file status objects that corresponds to the list of paths
    * excluding those non-existent paths.
    * 
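
Since the base implementation is a no-op, only checksum-aware filesystems need to override the new hook. A hypothetical subclass sketch (the class and field names are illustrative, not part of this commit):

    import org.apache.hadoop.fs.FilterFileSystem;

    // Hypothetical filter that records the flag for use in its write path.
    public class ChecksumAwareFs extends FilterFileSystem {
      private boolean writeChecksum = true;

      @Override
      public void setWriteChecksum(boolean writeChecksum) {
        // Remember the flag instead of inheriting the base no-op; a real
        // implementation would consult it in create().
        this.writeChecksum = writeChecksum;
      }
    }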

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFileSystem.java?rev=1242635&r1=1242634&r2=1242635&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFileSystem.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFileSystem.java Fri Feb 10 01:49:08 2012
@@ -20,6 +20,7 @@ package org.apache.hadoop.fs;
 
 import java.io.*;
 import java.net.URI;
+import java.net.URISyntaxException;
 import java.util.EnumSet;
 import java.util.List;
 
@@ -51,6 +52,7 @@ import org.apache.hadoop.util.Progressab
 public class FilterFileSystem extends FileSystem {
   
   protected FileSystem fs;
+  private String swapScheme;
   
   /*
    * so that extending classes can define it
@@ -63,13 +65,25 @@ public class FilterFileSystem extends Fi
     this.statistics = fs.statistics;
   }
 
+  /**
+   * Get the raw file system 
+   * @return FileSystem being filtered
+   */
+  public FileSystem getRawFileSystem() {
+    return fs;
+  }
+
   /** Called after a new FileSystem instance is constructed.
    * @param name a uri whose authority section names the host, port, etc.
    *   for this FileSystem
    * @param conf the configuration
    */
   public void initialize(URI name, Configuration conf) throws IOException {
-    fs.initialize(name, conf);
+    super.initialize(name, conf);
+    String scheme = name.getScheme();
+    if (!scheme.equals(fs.getUri().getScheme())) {
+      swapScheme = scheme;
+    }
   }
 
   /** Returns a URI whose scheme and authority identify this FileSystem.*/
@@ -88,7 +102,19 @@ public class FilterFileSystem extends Fi
   
   /** Make sure that a path specifies a FileSystem. */
   public Path makeQualified(Path path) {
-    return fs.makeQualified(path);
+    Path fqPath = fs.makeQualified(path);
+    // swap in our scheme if the filtered fs is using a different scheme
+    if (swapScheme != null) {
+      try {
+        // NOTE: should deal with authority, but too much other stuff is broken 
+        fqPath = new Path(
+            new URI(swapScheme, fqPath.toUri().getSchemeSpecificPart(), null)
+        );
+      } catch (URISyntaxException e) {
+        throw new IllegalArgumentException(e);
+      }
+    }
+    return fqPath;
   }
   
   ///////////////////////////////////////////////////////////////
@@ -334,6 +360,11 @@ public class FilterFileSystem extends Fi
   public void setVerifyChecksum(boolean verifyChecksum) {
     fs.setVerifyChecksum(verifyChecksum);
   }
+  
+  @Override
+  public void setWriteChecksum(boolean writeChecksum) {
+    fs.setWriteChecksum(writeChecksum);
+  }
 
   @Override
   public Configuration getConf() {
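
The swapScheme bookkeeping above lets a FilterFileSystem that is registered under its own scheme qualify paths with that scheme rather than the wrapped filesystem's. A rough sketch of the effect (the "myfs" scheme is hypothetical; a real deployment would bind it via an fs.myfs.impl setting):

    import java.net.URI;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.FilterFileSystem;
    import org.apache.hadoop.fs.Path;

    public class SchemeSwapDemo {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        FilterFileSystem ffs = new FilterFileSystem(FileSystem.getLocal(conf));
        // Initializing under a scheme other than "file" records swapScheme.
        ffs.initialize(URI.create("myfs:///"), conf);
        // makeQualified swaps the wrapped fs's scheme for "myfs".
        System.out.println(ffs.makeQualified(new Path("/tmp/x")));  // myfs:/tmp/x
      }
    }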

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalFileSystem.java?rev=1242635&r1=1242634&r2=1242635&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalFileSystem.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalFileSystem.java Fri Feb 10 01:49:08 2012
@@ -24,6 +24,7 @@ import java.util.*;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.conf.Configuration;
 
 /****************************************************************
  * Implement the FileSystem API for the checksummed local filesystem.
@@ -34,21 +35,26 @@ import org.apache.hadoop.classification.
 public class LocalFileSystem extends ChecksumFileSystem {
   static final URI NAME = URI.create("file:///");
   static private Random rand = new Random();
-  FileSystem rfs;
   
   public LocalFileSystem() {
     this(new RawLocalFileSystem());
   }
   
   public FileSystem getRaw() {
-    return rfs;
+    return getRawFileSystem();
   }
     
   public LocalFileSystem(FileSystem rawLocalFileSystem) {
     super(rawLocalFileSystem);
-    rfs = rawLocalFileSystem;
   }
     
+  @Override
+  public void initialize(URI uri, Configuration conf) throws IOException {
+    super.initialize(uri, conf);
+    // ctor didn't initialize the filtered fs
+    getRawFileSystem().initialize(uri, conf);
+  }
+  
   /** Convert a path to a File. */
   public File pathToFile(Path path) {
     return ((RawLocalFileSystem)fs).pathToFile(path);

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CommandWithDestination.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CommandWithDestination.java?rev=1242635&r1=1242634&r2=1242635&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CommandWithDestination.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CommandWithDestination.java Fri Feb 10 01:49:08 2012
@@ -41,7 +41,9 @@ import org.apache.hadoop.io.IOUtils;
  */
 abstract class CommandWithDestination extends FsCommand {  
   protected PathData dst;
-  protected boolean overwrite = false;
+  private boolean overwrite = false;
+  private boolean verifyChecksum = true;
+  private boolean writeChecksum = true;
   
   /**
    * 
@@ -53,6 +55,14 @@ abstract class CommandWithDestination ex
     overwrite = flag;
   }
   
+  protected void setVerifyChecksum(boolean flag) {
+    verifyChecksum = flag;
+  }
+  
+  protected void setWriteChecksum(boolean flag) {
+    writeChecksum = flag;
+  }
+  
   /**
    *  The last arg is expected to be a local path, if only one argument is
    *  given then the destination will be the current directory 
@@ -201,6 +211,7 @@ abstract class CommandWithDestination ex
    * @throws IOException if copy fails
    */ 
   protected void copyFileToTarget(PathData src, PathData target) throws IOException {
+    src.fs.setVerifyChecksum(verifyChecksum);
     copyStreamToTarget(src.fs.open(src.path), target);
   }
   
@@ -217,6 +228,7 @@ abstract class CommandWithDestination ex
     if (target.exists && (target.stat.isDirectory() || !overwrite)) {
       throw new PathExistsException(target.toString());
     }
+    target.fs.setWriteChecksum(writeChecksum);
     PathData tempFile = null;
     try {
       tempFile = target.createTempFile(target+"._COPYING_");

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CopyCommands.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CopyCommands.java?rev=1242635&r1=1242634&r2=1242635&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CopyCommands.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CopyCommands.java Fri Feb 10 01:49:08 2012
@@ -25,7 +25,6 @@ import java.util.List;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.fs.ChecksumFileSystem;
 import org.apache.hadoop.fs.FileUtil;
 
 /** Various commands for copying files */
@@ -103,43 +102,17 @@ class CopyCommands {  
       "to the local name.  <src> is kept.  When copying multiple,\n" +
       "files, the destination must be a directory.";
 
-    /**
-     * The prefix for the tmp file used in copyToLocal.
-     * It must be at least three characters long, required by
-     * {@link java.io.File#createTempFile(String, String, File)}.
-     */
-    private boolean copyCrc;
-    private boolean verifyChecksum;
-
     @Override
     protected void processOptions(LinkedList<String> args)
     throws IOException {
       CommandFormat cf = new CommandFormat(
           1, Integer.MAX_VALUE, "crc", "ignoreCrc");
       cf.parse(args);
-      copyCrc = cf.getOpt("crc");
-      verifyChecksum = !cf.getOpt("ignoreCrc");
-      
+      setWriteChecksum(cf.getOpt("crc"));
+      setVerifyChecksum(!cf.getOpt("ignoreCrc"));
       setRecursive(true);
       getLocalDestination(args);
     }
-
-    @Override
-    protected void copyFileToTarget(PathData src, PathData target)
-    throws IOException {
-      src.fs.setVerifyChecksum(verifyChecksum);
-
-      if (copyCrc && !(src.fs instanceof ChecksumFileSystem)) {
-        displayWarning(src.fs + ": Does not support checksums");
-        copyCrc = false;
-      }      
-
-      super.copyFileToTarget(src, target);
-      if (copyCrc) {
-        // should we delete real file if crc copy fails?
-        super.copyFileToTarget(src.getChecksumFile(), target.getChecksumFile());
-      }
-    }
   }
 
   /**
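
With the checksum flags now plumbed through CommandWithDestination, Get no longer needs its own crc copying logic: -crc simply turns on writeChecksum for the local target filesystem, and -ignoreCrc turns off verification of the source. A sketch of driving the shell programmatically (the paths are made up):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FsShell;
    import org.apache.hadoop.util.ToolRunner;

    public class GetWithCrc {
      public static void main(String[] args) throws Exception {
        // -crc makes the local ChecksumFileSystem write a .crc sidecar for
        // the copy; -ignoreCrc would skip verifying the source instead.
        int rc = ToolRunner.run(new Configuration(), new FsShell(),
            new String[] { "-get", "-crc", "/user/me/data.txt", "/tmp/" });
        System.exit(rc);
      }
    }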

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/PathData.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/PathData.java?rev=1242635&r1=1242634&r2=1242635&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/PathData.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/PathData.java Fri Feb 10 01:49:08 2012
@@ -27,7 +27,6 @@ import java.net.URISyntaxException;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.ChecksumFileSystem;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.LocalFileSystem;
@@ -170,19 +169,6 @@ public class PathData {
   }
   
   /**
-   * Return the corresponding crc data for a file.  Avoids exposing the fs
-   * contortions to the caller.  
-   * @return PathData of the crc file
-   * @throws IOException is anything goes wrong
-   */
-  public PathData getChecksumFile() throws IOException {
-    checkIfExists(FileTypeRequirement.SHOULD_NOT_BE_DIRECTORY);
-    ChecksumFileSystem srcFs = (ChecksumFileSystem)fs;
-    Path srcPath = srcFs.getChecksumFile(path);
-    return new PathData(srcFs.getRawFileSystem(), srcPath.toString());
-  }
-
-  /**
    * Returns a temporary file for this PathData with the given extension.
    * The file will be deleted on exit.
    * @param extension for the temporary file

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ChRootedFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ChRootedFileSystem.java?rev=1242635&r1=1242634&r2=1242635&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ChRootedFileSystem.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ChRootedFileSystem.java Fri Feb 10 01:49:08 2012
@@ -19,9 +19,6 @@ package org.apache.hadoop.fs.viewfs;
 import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.net.URI;
-import java.net.URISyntaxException;
-import java.util.List;
-
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
@@ -31,11 +28,11 @@ import org.apache.hadoop.fs.FSDataOutput
 import org.apache.hadoop.fs.FileChecksum;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.FilterFileSystem;
 import org.apache.hadoop.fs.FsServerDefaults;
 import org.apache.hadoop.fs.FsStatus;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
-import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.util.Progressable;
 
 /**
@@ -60,15 +57,14 @@ import org.apache.hadoop.util.Progressab
 
 @InterfaceAudience.Private
 @InterfaceStability.Evolving /*Evolving for a release,to be changed to Stable */
-class ChRootedFileSystem extends FileSystem {
-  private final FileSystem myFs; // the base file system whose root is changed
+class ChRootedFileSystem extends FilterFileSystem {
   private final URI myUri; // the base URI + the chRoot
   private final Path chRootPathPart; // the root below the root of the base
   private final String chRootPathPartString;
   private Path workingDir;
   
   protected FileSystem getMyFs() {
-    return myFs;
+    return getRawFileSystem();
   }
   
   /**
@@ -84,37 +80,16 @@ class ChRootedFileSystem extends FileSys
   
   /**
    * Constructor
-   * @param fs base file system
-   * @param theRoot chRoot for this file system
-   * @throws URISyntaxException
+   * @param uri base file system
+   * @param conf configuration
+   * @throws IOException 
    */
-  public ChRootedFileSystem(final FileSystem fs, final Path theRoot)
-    throws URISyntaxException {
-    myFs = fs;
-    myFs.makeQualified(theRoot); //check that root is a valid path for fs
-                            // Would like to call myFs.checkPath(theRoot); 
-                            // but not public
-    chRootPathPart = new Path(theRoot.toUri().getPath());
+  public ChRootedFileSystem(final URI uri, Configuration conf)
+      throws IOException {
+    super(FileSystem.get(uri, conf));
+    chRootPathPart = new Path(uri.getPath());
     chRootPathPartString = chRootPathPart.toUri().getPath();
-    try {
-      initialize(fs.getUri(), fs.getConf());
-    } catch (IOException e) { // This exception should not be thrown
-      throw new RuntimeException("This should not occur");
-    }
-    
-    /*
-     * We are making URI include the chrootedPath: e.g. file:///chrootedPath.
-     * This is questionable since Path#makeQualified(uri, path) ignores
-     * the pathPart of a uri. Since this class is internal we can ignore
-     * this issue but if we were to make it external then this needs
-     * to be resolved.
-     */
-    // Handle the two cases:
-    //              scheme:/// and scheme://authority/
-    myUri = new URI(myFs.getUri().toString() + 
-        (myFs.getUri().getAuthority() == null ? "" :  Path.SEPARATOR) +
-          chRootPathPart.toString().substring(1));
-
+    myUri = uri;
     workingDir = getHomeDirectory();
     // We don't use the wd of the myFs
   }
@@ -127,7 +102,6 @@ class ChRootedFileSystem extends FileSys
    */
   public void initialize(final URI name, final Configuration conf)
       throws IOException {
-    myFs.initialize(name, conf);
     super.initialize(name, conf);
     setConf(conf);
   }
@@ -137,12 +111,6 @@ class ChRootedFileSystem extends FileSys
     return myUri;
   }
   
-  @Override
-  public Path makeQualified(final Path path) {
-    return myFs.makeQualified(path);
-    // NOT myFs.makeQualified(fullPath(path));
-  }
- 
   /**
    * Strip out the root from the path.
    * @param p - fully qualified path p
@@ -175,7 +143,7 @@ class ChRootedFileSystem extends FileSys
   
   public Path getResolvedQualifiedPath(final Path f)
       throws FileNotFoundException {
-    return myFs.makeQualified(
+    return makeQualified(
         new Path(chRootPathPartString + f.toUri().toString()));
   }
   
@@ -199,14 +167,14 @@ class ChRootedFileSystem extends FileSys
   public FSDataOutputStream create(final Path f, final FsPermission permission,
       final boolean overwrite, final int bufferSize, final short replication,
       final long blockSize, final Progressable progress) throws IOException {
-    return myFs.create(fullPath(f), permission, overwrite, bufferSize,
+    return super.create(fullPath(f), permission, overwrite, bufferSize,
         replication, blockSize, progress);
   }
 
   @Override
   public boolean delete(final Path f, final boolean recursive) 
       throws IOException {
-    return myFs.delete(fullPath(f), recursive);
+    return super.delete(fullPath(f), recursive);
   }
   
 
@@ -219,95 +187,90 @@ class ChRootedFileSystem extends FileSys
   @Override
   public BlockLocation[] getFileBlockLocations(final FileStatus fs, final long start,
       final long len) throws IOException {
-    return myFs.getFileBlockLocations(
+    return super.getFileBlockLocations(
         new ViewFsFileStatus(fs, fullPath(fs.getPath())), start, len);
   }
 
   @Override
   public FileChecksum getFileChecksum(final Path f) 
       throws IOException {
-    return myFs.getFileChecksum(fullPath(f));
+    return super.getFileChecksum(fullPath(f));
   }
 
   @Override
   public FileStatus getFileStatus(final Path f) 
       throws IOException {
-    return myFs.getFileStatus(fullPath(f));
+    return super.getFileStatus(fullPath(f));
   }
 
   @Override
   public FsStatus getStatus(Path p) throws IOException {
-    return myFs.getStatus(fullPath(p));
+    return super.getStatus(fullPath(p));
   }
 
   @Override
   public FsServerDefaults getServerDefaults() throws IOException {
-    return myFs.getServerDefaults();
+    return super.getServerDefaults();
   }
 
   @Override
   public FileStatus[] listStatus(final Path f) 
       throws IOException {
-    return myFs.listStatus(fullPath(f));
+    return super.listStatus(fullPath(f));
   }
   
   @Override
   public boolean mkdirs(final Path f, final FsPermission permission)
       throws IOException {
-    return myFs.mkdirs(fullPath(f), permission);
+    return super.mkdirs(fullPath(f), permission);
   }
 
   @Override
   public FSDataInputStream open(final Path f, final int bufferSize) 
     throws IOException {
-    return myFs.open(fullPath(f), bufferSize);
+    return super.open(fullPath(f), bufferSize);
   }
   
   @Override
   public FSDataOutputStream append(final Path f, final int bufferSize,
       final Progressable progress) throws IOException {
-    return myFs.append(fullPath(f), bufferSize, progress);
+    return super.append(fullPath(f), bufferSize, progress);
   }
 
   @Override
   public boolean rename(final Path src, final Path dst) throws IOException {
     // note fullPath will check that paths are relative to this FileSystem.
     // Hence both are in same file system and a rename is valid
-    return myFs.rename(fullPath(src), fullPath(dst)); 
+    return super.rename(fullPath(src), fullPath(dst)); 
   }
   
   @Override
   public void setOwner(final Path f, final String username,
       final String groupname)
     throws IOException {
-    myFs.setOwner(fullPath(f), username, groupname);
+    super.setOwner(fullPath(f), username, groupname);
   }
 
   @Override
   public void setPermission(final Path f, final FsPermission permission)
     throws IOException {
-    myFs.setPermission(fullPath(f), permission);
+    super.setPermission(fullPath(f), permission);
   }
 
   @Override
   public boolean setReplication(final Path f, final short replication)
     throws IOException {
-    return myFs.setReplication(fullPath(f), replication);
+    return super.setReplication(fullPath(f), replication);
   }
 
   @Override
   public void setTimes(final Path f, final long mtime, final long atime) 
       throws IOException {
-    myFs.setTimes(fullPath(f), mtime, atime);
-  }
-
-  @Override
-  public void setVerifyChecksum(final boolean verifyChecksum)  {
-    myFs.setVerifyChecksum(verifyChecksum);
+    super.setTimes(fullPath(f), mtime, atime);
   }
   
   @Override
-  public List<Token<?>> getDelegationTokens(String renewer) throws IOException {
-    return myFs.getDelegationTokens(renewer);
+  public Path resolvePath(final Path p) throws IOException {
+    return super.resolvePath(fullPath(p));
   }
 }

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java?rev=1242635&r1=1242634&r2=1242635&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java Fri Feb 10 01:49:08 2012
@@ -168,8 +168,7 @@ public class ViewFileSystem extends File
         protected
         FileSystem getTargetFileSystem(final URI uri)
           throws URISyntaxException, IOException {
-            return new ChRootedFileSystem(FileSystem.get(uri, config), 
-                new Path(uri.getPath()));
+            return new ChRootedFileSystem(uri, config);
         }
 
         @Override
@@ -464,10 +463,22 @@ public class ViewFileSystem extends File
 
   @Override
   public void setVerifyChecksum(final boolean verifyChecksum) { 
-    // This is a file system level operations, however ViewFileSystem 
-    // points to many file systems. Noop for ViewFileSystem.
+    List<InodeTree.MountPoint<FileSystem>> mountPoints = 
+        fsState.getMountPoints();
+    for (InodeTree.MountPoint<FileSystem> mount : mountPoints) {
+      mount.target.targetFileSystem.setVerifyChecksum(verifyChecksum);
+    }
   }
   
+  @Override
+  public void setWriteChecksum(final boolean writeChecksum) { 
+    List<InodeTree.MountPoint<FileSystem>> mountPoints = 
+        fsState.getMountPoints();
+    for (InodeTree.MountPoint<FileSystem> mount : mountPoints) {
+      mount.target.targetFileSystem.setWriteChecksum(writeChecksum);
+    }
+  }
+
   public MountPoint[] getMountPoints() {
     List<InodeTree.MountPoint<FileSystem>> mountPoints = 
                   fsState.getMountPoints();
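
Because each mount point is now backed by a FilterFileSystem (ChRootedFileSystem), ViewFileSystem can fan setVerifyChecksum/setWriteChecksum out to every target instead of treating them as no-ops. A rough sketch (the mount table link is hypothetical):

    import java.net.URI;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;

    public class ViewFsChecksumDemo {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // One link in the default mount table, backed by the local fs.
        conf.set("fs.viewfs.mounttable.default.link./data", "file:///tmp/data");
        FileSystem viewFs = FileSystem.get(URI.create("viewfs:///"), conf);
        // Both calls now reach the ChRootedFileSystem behind each mount point.
        viewFs.setVerifyChecksum(false);
        viewFs.setWriteChecksum(false);
      }
    }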

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/jmx/JMXJsonServlet.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/jmx/JMXJsonServlet.java?rev=1242635&r1=1242634&r2=1242635&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/jmx/JMXJsonServlet.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/jmx/JMXJsonServlet.java Fri Feb 10 01:49:08 2012
@@ -34,6 +34,7 @@ import javax.management.MBeanServer;
 import javax.management.MalformedObjectNameException;
 import javax.management.ObjectName;
 import javax.management.ReflectionException;
+import javax.management.RuntimeMBeanException;
 import javax.management.openmbean.CompositeData;
 import javax.management.openmbean.CompositeType;
 import javax.management.openmbean.TabularData;
@@ -308,6 +309,15 @@ public class JMXJsonServlet extends Http
     Object value = null;
     try {
       value = mBeanServer.getAttribute(oname, attName);
+    } catch (RuntimeMBeanException e) {
+      // UnsupportedOperationExceptions happen in the normal course of business,
+      // so no need to log them as errors all the time.
+      if (e.getCause() instanceof UnsupportedOperationException) {
+        LOG.debug("getting attribute "+attName+" of "+oname+" threw an exception", e);
+      } else {
+        LOG.error("getting attribute "+attName+" of "+oname+" threw an exception", e);
+      }
+      return;
     } catch (AttributeNotFoundException e) {
       //Ignored the attribute was not found, which should never happen because the bean
       //just told us that it has this attribute, but if this happens just don't output
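
For context on the servlet change: an MBean getter that throws an unchecked exception surfaces from MBeanServer.getAttribute() wrapped in RuntimeMBeanException, so the servlet can now log the benign UnsupportedOperationException case at DEBUG instead of ERROR. A toy bean that exercises that path (names are invented):

    import java.lang.management.ManagementFactory;
    import javax.management.MBeanServer;
    import javax.management.ObjectName;
    import javax.management.RuntimeMBeanException;

    interface DemoMXBean {
      int getBroken();
    }

    public class Demo implements DemoMXBean {
      public int getBroken() {
        throw new UnsupportedOperationException("attribute not supported");
      }

      public static void main(String[] args) throws Exception {
        MBeanServer server = ManagementFactory.getPlatformMBeanServer();
        ObjectName name = new ObjectName("demo:type=Demo");
        server.registerMBean(new Demo(), name);
        try {
          server.getAttribute(name, "Broken");
        } catch (RuntimeMBeanException e) {
          System.out.println("cause: " + e.getCause());  // the UOE above
        }
      }
    }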

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Credentials.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Credentials.java?rev=1242635&r1=1242634&r2=1242635&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Credentials.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Credentials.java Fri Feb 10 01:49:08 2012
@@ -230,14 +230,34 @@ public class Credentials implements Writ
  
   /**
    * Copy all of the credentials from one credential object into another.
+   * Existing secrets and tokens are overwritten.
    * @param other the credentials to copy
    */
   public void addAll(Credentials other) {
+    addAll(other, true);
+  }
+
+  /**
+   * Copy all of the credentials from one credential object into another.
+   * Existing secrets and tokens are not overwritten.
+   * @param other the credentials to copy
+   */
+  public void mergeAll(Credentials other) {
+    addAll(other, false);
+  }
+
+  private void addAll(Credentials other, boolean overwrite) {
     for(Map.Entry<Text, byte[]> secret: other.secretKeysMap.entrySet()) {
-      secretKeysMap.put(secret.getKey(), secret.getValue());
+      Text key = secret.getKey();
+      if (!secretKeysMap.containsKey(key) || overwrite) {
+        secretKeysMap.put(key, secret.getValue());
+      }
     }
     for(Map.Entry<Text, Token<?>> token: other.tokenMap.entrySet()){
-      tokenMap.put(token.getKey(), token.getValue());
+      Text key = token.getKey();
+      if (!tokenMap.containsKey(key) || overwrite) {
+        tokenMap.put(key, token.getValue());
+      }
     }
   }
 }
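
The new semantics in one place (keys and values are made up): addAll lets the incoming credentials overwrite, mergeAll keeps what is already there.

    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.security.Credentials;

    public class CredentialsDemo {
      public static void main(String[] args) {
        Credentials mine = new Credentials();
        mine.addSecretKey(new Text("k"), "old".getBytes());

        Credentials incoming = new Credentials();
        incoming.addSecretKey(new Text("k"), "new".getBytes());

        Credentials a = new Credentials();
        a.addAll(mine);
        a.addAll(incoming);     // overwrite: "k" -> "new"

        Credentials m = new Credentials();
        m.addAll(mine);
        m.mergeAll(incoming);   // keep existing: "k" -> "old"

        System.out.println(new String(a.getSecretKey(new Text("k"))));  // new
        System.out.println(new String(m.getSecretKey(new Text("k"))));  // old
      }
    }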

Propchange: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/core/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Fri Feb 10 01:49:08 2012
@@ -1,3 +1,3 @@
-/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/core:1152502-1237154
+/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/core:1152502-1242632
 /hadoop/core/branches/branch-0.19/core/src/test/core:713112
 /hadoop/core/trunk/src/test/core:776175-785643,785929-786278

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemTestHelper.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemTestHelper.java?rev=1242635&r1=1242634&r2=1242635&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemTestHelper.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemTestHelper.java Fri Feb 10 01:49:08 2012
@@ -72,14 +72,15 @@ public final class FileSystemTestHelper 
 
   public static String getAbsoluteTestRootDir(FileSystem fSys)
       throws IOException {
-    if (absTestRootDir == null) {
+    // NOTE: can't cache because of different filesystems!
+    //if (absTestRootDir == null) 
       if (TEST_ROOT_DIR.startsWith("/")) {
         absTestRootDir = TEST_ROOT_DIR;
       } else {
         absTestRootDir = fSys.getWorkingDirectory().toString() + "/"
             + TEST_ROOT_DIR;
       }
-    }
+    //}
     return absTestRootDir;
   }
   

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestChecksumFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestChecksumFileSystem.java?rev=1242635&r1=1242634&r2=1242635&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestChecksumFileSystem.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestChecksumFileSystem.java Fri Feb 10 01:49:08 2012
@@ -203,4 +203,58 @@ public class TestChecksumFileSystem {
     String str = readFile(localFs, testPath, 1024);
     assertEquals("testing stale checksum", str);
   }
+  
+  @Test
+  public void testRenameFileToFile() throws Exception {
+    Path srcPath = new Path(TEST_ROOT_DIR, "testRenameSrc");
+    Path dstPath = new Path(TEST_ROOT_DIR, "testRenameDst");
+    verifyRename(srcPath, dstPath, false);
+  }
+
+  @Test
+  public void testRenameFileIntoDir() throws Exception {
+    Path srcPath = new Path(TEST_ROOT_DIR, "testRenameSrc");
+    Path dstPath = new Path(TEST_ROOT_DIR, "testRenameDir");
+    localFs.mkdirs(dstPath);
+    verifyRename(srcPath, dstPath, true);
+  }
+
+  @Test
+  public void testRenameFileIntoDirFile() throws Exception {
+    Path srcPath = new Path(TEST_ROOT_DIR, "testRenameSrc");
+    Path dstPath = new Path(TEST_ROOT_DIR, "testRenameDir/testRenameDst");
+    assertTrue(localFs.mkdirs(dstPath));
+    verifyRename(srcPath, dstPath, false);
+  }
+
+
+  void verifyRename(Path srcPath, Path dstPath, boolean dstIsDir)
+      throws Exception { 
+    localFs.delete(srcPath,true);
+    localFs.delete(dstPath,true);
+    
+    Path realDstPath = dstPath;
+    if (dstIsDir) {
+      localFs.mkdirs(dstPath);
+      realDstPath = new Path(dstPath, srcPath.getName());
+    }
+    
+    // ensure file + checksum are moved
+    writeFile(localFs, srcPath, 1);
+    assertTrue(localFs.exists(localFs.getChecksumFile(srcPath)));
+    assertTrue(localFs.rename(srcPath, dstPath));
+    assertTrue(localFs.exists(localFs.getChecksumFile(realDstPath)));
+
+    // create a file with no checksum, rename, ensure dst checksum is removed    
+    writeFile(localFs.getRawFileSystem(), srcPath, 1);
+    assertFalse(localFs.exists(localFs.getChecksumFile(srcPath)));
+    assertTrue(localFs.rename(srcPath, dstPath));
+    assertFalse(localFs.exists(localFs.getChecksumFile(realDstPath)));
+    
+    // create file with checksum, rename over prior dst with no checksum
+    writeFile(localFs, srcPath, 1);
+    assertTrue(localFs.exists(localFs.getChecksumFile(srcPath)));
+    assertTrue(localFs.rename(srcPath, dstPath));
+    assertTrue(localFs.exists(localFs.getChecksumFile(realDstPath)));
+  }
 }

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestChRootedFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestChRootedFileSystem.java?rev=1242635&r1=1242634&r2=1242635&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestChRootedFileSystem.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestChRootedFileSystem.java Fri Feb 10 01:49:08 2012
@@ -51,7 +51,7 @@ public class TestChRootedFileSystem {
 
 
     // ChRoot to the root of the testDirectory
-    fSys = new ChRootedFileSystem(fSysTarget, chrootedTo);
+    fSys = new ChRootedFileSystem(chrootedTo.toUri(), conf);
   }
 
   @After

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestCredentials.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestCredentials.java?rev=1242635&r1=1242634&r2=1242635&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestCredentials.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestCredentials.java Fri Feb 10 01:49:08 2012
@@ -137,4 +137,81 @@ public class TestCredentials {
     }
     tmpFileName.delete();
   }
+
+  static Text secret[] = {
+      new Text("secret1"),
+      new Text("secret2"),
+      new Text("secret3"),
+      new Text("secret4")
+  };
+  static Text service[] = {
+      new Text("service1"),
+      new Text("service2"),
+      new Text("service3"),
+      new Text("service4")
+  };
+  static Token<?> token[] = {
+      new Token<TokenIdentifier>(),
+      new Token<TokenIdentifier>(),
+      new Token<TokenIdentifier>(),
+      new Token<TokenIdentifier>()
+  };
+  
+  @Test
+  public void addAll() {
+    Credentials creds = new Credentials();
+    creds.addToken(service[0], token[0]);
+    creds.addToken(service[1], token[1]);
+    creds.addSecretKey(secret[0], secret[0].getBytes());
+    creds.addSecretKey(secret[1], secret[1].getBytes());
+
+    Credentials credsToAdd = new Credentials();
+    // one duplicate with different value, one new
+    credsToAdd.addToken(service[0], token[3]);
+    credsToAdd.addToken(service[2], token[2]);
+    credsToAdd.addSecretKey(secret[0], secret[3].getBytes());
+    credsToAdd.addSecretKey(secret[2], secret[2].getBytes());
+    
+    creds.addAll(credsToAdd);
+    assertEquals(3, creds.numberOfTokens());
+    assertEquals(3, creds.numberOfSecretKeys());
+    // existing token & secret should be overwritten
+    assertEquals(token[3], creds.getToken(service[0]));
+    assertEquals(secret[3], new Text(creds.getSecretKey(secret[0])));
+    // non-duplicate token & secret should be present
+    assertEquals(token[1], creds.getToken(service[1]));
+    assertEquals(secret[1], new Text(creds.getSecretKey(secret[1])));
+    // new token & secret should be added
+    assertEquals(token[2], creds.getToken(service[2]));
+    assertEquals(secret[2], new Text(creds.getSecretKey(secret[2])));
+  }
+
+  @Test
+  public void mergeAll() {
+    Credentials creds = new Credentials();
+    creds.addToken(service[0], token[0]);
+    creds.addToken(service[1], token[1]);
+    creds.addSecretKey(secret[0], secret[0].getBytes());
+    creds.addSecretKey(secret[1], secret[1].getBytes());
+    
+    Credentials credsToAdd = new Credentials();
+    // one duplicate with different value, one new
+    credsToAdd.addToken(service[0], token[3]);
+    credsToAdd.addToken(service[2], token[2]);
+    credsToAdd.addSecretKey(secret[0], secret[3].getBytes());
+    credsToAdd.addSecretKey(secret[2], secret[2].getBytes());
+    
+    creds.mergeAll(credsToAdd);
+    assertEquals(3, creds.numberOfTokens());
+    assertEquals(3, creds.numberOfSecretKeys());
+    // existing token & secret should not be overwritten
+    assertEquals(token[0], creds.getToken(service[0]));
+    assertEquals(secret[0], new Text(creds.getSecretKey(secret[0])));
+    // non-duplicate token & secret should be present
+    assertEquals(token[1], creds.getToken(service[1]));
+    assertEquals(secret[1], new Text(creds.getSecretKey(secret[1])));
+    // new token & secret should be added
+    assertEquals(token[2], creds.getToken(service[2]));
+    assertEquals(secret[2], new Text(creds.getSecretKey(secret[2])));
  }
+}
\ No newline at end of file