Posted to common-commits@hadoop.apache.org by at...@apache.org on 2012/01/28 01:21:10 UTC

svn commit: r1236936 - in /hadoop/common/branches/HDFS-1623/hadoop-common-project: dev-support/ hadoop-common/ hadoop-common/dev-support/ hadoop-common/src/main/bin/ hadoop-common/src/main/docs/ hadoop-common/src/main/java/ hadoop-common/src/main/java/...

Author: atm
Date: Sat Jan 28 00:21:07 2012
New Revision: 1236936

URL: http://svn.apache.org/viewvc?rev=1236936&view=rev
Log:
Merge trunk into HA branch.

Added:
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtocolMetaInfoPB.java
      - copied unchanged from r1236791, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtocolMetaInfoPB.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtocolMetaInfoServerSideTranslatorPB.java
      - copied unchanged from r1236791, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtocolMetaInfoServerSideTranslatorPB.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtocolMetaInterface.java
      - copied unchanged from r1236791, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtocolMetaInterface.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcClientUtil.java
      - copied unchanged from r1236791, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcClientUtil.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcInvocationHandler.java
      - copied unchanged from r1236791, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcInvocationHandler.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/proto/ProtocolInfo.proto
      - copied unchanged from r1236791, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/proto/ProtocolInfo.proto
Modified:
    hadoop/common/branches/HDFS-1623/hadoop-common-project/dev-support/test-patch.properties
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/CHANGES.txt   (contents, props changed)
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/bin/hadoop
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/bin/hadoop-config.sh
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/docs/   (props changed)
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/   (props changed)
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFileSystem.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryInvocationHandler.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtocolSignature.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RPC.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcEngine.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WritableRpcEngine.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/core/   (props changed)
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestDeprecatedKeys.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestChecksumFileSystem.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSequenceFile.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPC.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPCCompatibility.java

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/dev-support/test-patch.properties
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/dev-support/test-patch.properties?rev=1236936&r1=1236935&r2=1236936&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/dev-support/test-patch.properties (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/dev-support/test-patch.properties Sat Jan 28 00:21:07 2012
@@ -18,4 +18,4 @@
 
 OK_RELEASEAUDIT_WARNINGS=0
 OK_FINDBUGS_WARNINGS=0
-OK_JAVADOC_WARNINGS=6
+OK_JAVADOC_WARNINGS=13

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1236936&r1=1236935&r2=1236936&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/CHANGES.txt (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/CHANGES.txt Sat Jan 28 00:21:07 2012
@@ -74,7 +74,11 @@ Trunk (unreleased changes)
 
     HADOOP-7987. Support setting the run-as user in unsecure mode. (jitendra)
 
+    HADOOP-7965. Support for protocol version and signature in PB. (jitendra)
+
   BUGS
+    HADOOP-7998. ChecksumFileSystem does not correctly honor setVerifyChecksum
+                 (Daryn Sharp via bobby)
 
     HADOOP-7851. Configuration.getClasses() never returns the default value. 
                  (Uma Maheswara Rao G via amarrk)
@@ -209,6 +213,8 @@ Release 0.23.1 - Unreleased
     HADOOP-7919. Remove the unused hadoop.logfile.* properties from the 
     core-default.xml file. (harsh)
 
+    HADOOP-7939. Improve Hadoop subcomponent integration in Hadoop 0.23. (rvs via tucu)
+
   OPTIMIZATIONS
 
   BUG FIXES
@@ -290,6 +296,15 @@ Release 0.23.1 - Unreleased
    HADOOP-7981. Improve documentation for org.apache.hadoop.io.compress.
    Decompressor.getRemaining (Jonathan Eagles via mahadev)
 
+   HADOOP-7997. SequenceFile.createWriter(...createParent...) no
+   longer works on existing file. (Gregory Chanan via eli)
+
+   HADOOP-7993. Hadoop ignores old-style config options for enabling compressed 
+   output. (Anupam Seth via mahadev)
+
+   HADOOP-8000. fetchdt command not available in bin/hadoop.
+   (Arpit Gupta via mahadev)
+
 Release 0.23.0 - 2011-11-01 
 
   INCOMPATIBLE CHANGES

Propchange: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/CHANGES.txt
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Sat Jan 28 00:21:07 2012
@@ -1,5 +1,5 @@
 /hadoop/common/branches/yahoo-merge/CHANGES.txt:1079157,1079163-1079164,1079167
-/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt:1161333-1236328
+/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt:1161333-1236935
 /hadoop/core/branches/branch-0.18/CHANGES.txt:727226
 /hadoop/core/branches/branch-0.19/CHANGES.txt:713112
 /hadoop/core/trunk/CHANGES.txt:776175-785643,785929-786278

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml?rev=1236936&r1=1236935&r2=1236936&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml Sat Jan 28 00:21:07 2012
@@ -274,4 +274,8 @@
       <!-- protobuf generated code -->
       <Class name="~org\.apache\.hadoop\.ipc\.protobuf\.HadoopRpcProtos.*"/>
     </Match>
+    <Match>
+      <!-- protobuf generated code -->
+      <Class name="~org\.apache\.hadoop\.ipc\.protobuf\.ProtocolInfoProtos.*"/>
+    </Match>
  </FindBugsFilter>

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/bin/hadoop
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/bin/hadoop?rev=1236936&r1=1236935&r2=1236936&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/bin/hadoop (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/bin/hadoop Sat Jan 28 00:21:07 2012
@@ -51,7 +51,7 @@ fi
 COMMAND=$1
 case $COMMAND in
   #hdfs commands
-  namenode|secondarynamenode|datanode|dfs|dfsadmin|fsck|balancer)
+  namenode|secondarynamenode|datanode|dfs|dfsadmin|fsck|balancer|fetchdt)
     echo "DEPRECATED: Use of this script to execute hdfs command is deprecated." 1>&2
     echo "Instead use the hdfs command for it." 1>&2
     echo "" 1>&2

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/bin/hadoop-config.sh
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/bin/hadoop-config.sh?rev=1236936&r1=1236935&r2=1236936&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/bin/hadoop-config.sh (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/bin/hadoop-config.sh Sat Jan 28 00:21:07 2012
@@ -25,9 +25,21 @@ common_bin=$(cd -P -- "$(dirname -- "$th
 script="$(basename -- "$this")"
 this="$common_bin/$script"
 
+[ -f "$common_bin/hadoop-layout.sh" ] && . "$common_bin/hadoop-layout.sh"
+
+HADOOP_COMMON_DIR=${HADOOP_COMMON_DIR:-"share/hadoop/common"}
+HADOOP_COMMON_LIB_JARS_DIR=${HADOOP_COMMON_LIB_JARS_DIR:-"share/hadoop/common/lib"}
+HADOOP_COMMON_LIB_NATIVE_DIR=${HADOOP_COMMON_LIB_NATIVE_DIR:-"lib/native"}
+HDFS_DIR=${HDFS_DIR:-"share/hadoop/hdfs"}
+HDFS_LIB_JARS_DIR=${HDFS_LIB_JARS_DIR:-"share/hadoop/hdfs/lib"}
+YARN_DIR=${YARN_DIR:-"share/hadoop/mapreduce"}
+YARN_LIB_JARS_DIR=${YARN_LIB_JARS_DIR:-"share/hadoop/mapreduce/lib"}
+MAPRED_DIR=${MAPRED_DIR:-"share/hadoop/mapreduce"}
+MAPRED_LIB_JARS_DIR=${MAPRED_LIB_JARS_DIR:-"share/hadoop/mapreduce/lib"}
+
 # the root of the Hadoop installation
 # See HADOOP-6255 for directory structure layout
-HADOOP_DEFAULT_PREFIX=`dirname "$this"`/..
+HADOOP_DEFAULT_PREFIX=$(cd -P -- "$common_bin"/.. && pwd -P)
 HADOOP_PREFIX=${HADOOP_PREFIX:-$HADOOP_DEFAULT_PREFIX}
 export HADOOP_PREFIX
 
@@ -144,16 +156,22 @@ CLASSPATH="${HADOOP_CONF_DIR}"
 # so that filenames w/ spaces are handled correctly in loops below
 IFS=
 
+if [ "$HADOOP_COMMON_HOME" = "" ]; then
+  if [ -d "${HADOOP_PREFIX}/$HADOOP_COMMON_DIR" ]; then
+    HADOOP_COMMON_HOME=$HADOOP_PREFIX
+  fi
+fi
+
 # for releases, add core hadoop jar & webapps to CLASSPATH
-if [ -d "$HADOOP_PREFIX/share/hadoop/common/webapps" ]; then
-  CLASSPATH=${CLASSPATH}:$HADOOP_PREFIX/share/hadoop/common/webapps
+if [ -d "$HADOOP_COMMON_HOME/$HADOOP_COMMON_DIR/webapps" ]; then
+  CLASSPATH=${CLASSPATH}:$HADOOP_COMMON_HOME/$HADOOP_COMMON_DIR
 fi
 
-if [ -d "$HADOOP_PREFIX/share/hadoop/common/lib" ]; then
-  CLASSPATH=${CLASSPATH}:$HADOOP_PREFIX/share/hadoop/common/lib'/*'
+if [ -d "$HADOOP_COMMON_HOME/$HADOOP_COMMON_LIB_JARS_DIR" ]; then
+  CLASSPATH=${CLASSPATH}:$HADOOP_COMMON_HOME/$HADOOP_COMMON_LIB_JARS_DIR'/*'
 fi
 
-CLASSPATH=${CLASSPATH}:$HADOOP_PREFIX/share/hadoop/common'/*'
+CLASSPATH=${CLASSPATH}:$HADOOP_COMMON_HOME/$HADOOP_COMMON_DIR'/*'
 
 # add user-specified CLASSPATH last
 if [ "$HADOOP_CLASSPATH" != "" ]; then
@@ -185,13 +203,13 @@ fi
 
 # setup 'java.library.path' for native-hadoop code if necessary
 
-if [ -d "${HADOOP_PREFIX}/build/native" -o -d "${HADOOP_PREFIX}/lib/native" ]; then
+if [ -d "${HADOOP_PREFIX}/build/native" -o -d "${HADOOP_PREFIX}/$HADOOP_COMMON_LIB_NATIVE_DIR" ]; then
     
-  if [ -d "${HADOOP_PREFIX}/lib/native" ]; then
+  if [ -d "${HADOOP_PREFIX}/$HADOOP_COMMON_LIB_NATIVE_DIR" ]; then
     if [ "x$JAVA_LIBRARY_PATH" != "x" ]; then
-      JAVA_LIBRARY_PATH=${JAVA_LIBRARY_PATH}:${HADOOP_PREFIX}/lib/native
+      JAVA_LIBRARY_PATH=${JAVA_LIBRARY_PATH}:${HADOOP_PREFIX}/$HADOOP_COMMON_LIB_NATIVE_DIR
     else
-      JAVA_LIBRARY_PATH=${HADOOP_PREFIX}/lib/native
+      JAVA_LIBRARY_PATH=${HADOOP_PREFIX}/$HADOOP_COMMON_LIB_NATIVE_DIR
     fi
   fi
 fi
@@ -216,37 +234,56 @@ HADOOP_OPTS="$HADOOP_OPTS -Djava.net.pre
 
 # put hdfs in classpath if present
 if [ "$HADOOP_HDFS_HOME" = "" ]; then
-  if [ -d "${HADOOP_PREFIX}/share/hadoop/hdfs" ]; then
+  if [ -d "${HADOOP_PREFIX}/$HDFS_DIR" ]; then
     HADOOP_HDFS_HOME=$HADOOP_PREFIX
   fi
 fi
 
-if [ -d "$HADOOP_HDFS_HOME/share/hadoop/hdfs/webapps" ]; then
-  CLASSPATH=${CLASSPATH}:$HADOOP_HDFS_HOME/share/hadoop/hdfs
+if [ -d "$HADOOP_HDFS_HOME/$HDFS_DIR/webapps" ]; then
+  CLASSPATH=${CLASSPATH}:$HADOOP_HDFS_HOME/$HDFS_DIR
 fi
 
-if [ -d "$HADOOP_HDFS_HOME/share/hadoop/hdfs/lib" ]; then
-  CLASSPATH=${CLASSPATH}:$HADOOP_HDFS_HOME/share/hadoop/hdfs/lib'/*'
+if [ -d "$HADOOP_HDFS_HOME/$HDFS_LIB_JARS_DIR" ]; then
+  CLASSPATH=${CLASSPATH}:$HADOOP_HDFS_HOME/$HDFS_LIB_JARS_DIR'/*'
 fi
 
-CLASSPATH=${CLASSPATH}:$HADOOP_HDFS_HOME/share/hadoop/hdfs'/*'
+CLASSPATH=${CLASSPATH}:$HADOOP_HDFS_HOME/$HDFS_DIR'/*'
 
 # put yarn in classpath if present
 if [ "$YARN_HOME" = "" ]; then
-  if [ -d "${HADOOP_PREFIX}/share/hadoop/mapreduce" ]; then
+  if [ -d "${HADOOP_PREFIX}/$YARN_DIR" ]; then
     YARN_HOME=$HADOOP_PREFIX
   fi
 fi
 
-if [ -d "$YARN_HOME/share/hadoop/mapreduce/webapps" ]; then
-  CLASSPATH=${CLASSPATH}:$YARN_HOME/share/hadoop/mapreduce
+if [ -d "$YARN_HOME/$YARN_DIR/webapps" ]; then
+  CLASSPATH=${CLASSPATH}:$YARN_HOME/$YARN_DIR
+fi
+
+if [ -d "$YARN_HOME/$YARN_LIB_JARS_DIR" ]; then
+  CLASSPATH=${CLASSPATH}:$YARN_HOME/$YARN_LIB_JARS_DIR'/*'
 fi
 
-if [ -d "$YARN_HOME/share/hadoop/mapreduce/lib" ]; then
-  CLASSPATH=${CLASSPATH}:$YARN_HOME/share/hadoop/mapreduce/lib'/*'
+CLASSPATH=${CLASSPATH}:$YARN_HOME/$YARN_DIR'/*'
+
+# put mapred in classpath if present AND different from YARN
+if [ "$HADOOP_MAPRED_HOME" = "" ]; then
+  if [ -d "${HADOOP_PREFIX}/$MAPRED_DIR" ]; then
+    HADOOP_MAPRED_HOME=$HADOOP_PREFIX
+  fi
 fi
 
-CLASSPATH=${CLASSPATH}:$YARN_HOME/share/hadoop/mapreduce'/*'
+if [ "$HADOOP_MAPRED_HOME/$MAPRED_DIR" != "$YARN_HOME/$YARN_DIR" ] ; then
+  if [ -d "$HADOOP_MAPRED_HOME/$MAPRED_DIR/webapps" ]; then
+    CLASSPATH=${CLASSPATH}:$HADOOP_MAPRED_HOME/$MAPRED_DIR
+  fi
+
+  if [ -d "$HADOOP_MAPRED_HOME/$MAPRED_LIB_JARS_DIR" ]; then
+    CLASSPATH=${CLASSPATH}:$HADOOP_MAPRED_HOME/$MAPRED_LIB_JARS_DIR'/*'
+  fi
+
+  CLASSPATH=${CLASSPATH}:$HADOOP_MAPRED_HOME/$MAPRED_DIR'/*'
+fi
 
 # cygwin path translation
 if $cygwin; then

Propchange: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/docs/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Sat Jan 28 00:21:07 2012
@@ -1,2 +1,2 @@
-/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/docs:1152502-1236328
+/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/docs:1152502-1236935
 /hadoop/core/branches/branch-0.19/src/docs:713112

Propchange: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Sat Jan 28 00:21:07 2012
@@ -1,3 +1,3 @@
-/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java:1152502-1236328
+/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java:1152502-1236935
 /hadoop/core/branches/branch-0.19/core/src/java:713112
 /hadoop/core/trunk/src/core:776175-785643,785929-786278

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java?rev=1236936&r1=1236935&r2=1236936&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java Sat Jan 28 00:21:07 2012
@@ -345,7 +345,17 @@ public class Configuration implements It
     }
     return name;
   }
-  
+ 
+  private void handleDeprecation() {
+    LOG.debug("Handling deprecation for all properties in config...");
+    Set<Object> keys = new HashSet<Object>();
+    keys.addAll(getProps().keySet());
+    for (Object item: keys) {
+      LOG.debug("Handling deprecation for " + (String)item);
+      handleDeprecation((String)item);
+    }
+  }
+ 
   static{
     //print deprecation warning if hadoop-site.xml is found in classpath
     ClassLoader cL = Thread.currentThread().getContextClassLoader();
@@ -1667,7 +1677,7 @@ public class Configuration implements It
     Element conf = doc.createElement("configuration");
     doc.appendChild(conf);
     conf.appendChild(doc.createTextNode("\n"));
-    getProps(); // ensure properties is set
+    handleDeprecation(); //ensure properties is set and deprecation is handled
     for (Enumeration e = properties.keys(); e.hasMoreElements();) {
       String name = (String)e.nextElement();
       Object object = properties.get(name);
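
A minimal sketch of the user-visible effect of the new handleDeprecation() pass: writeXml() now resolves deprecated keys before serializing, so both the old key and its replacement land in the output (this mirrors the new TestDeprecatedKeys case further down; the wrapper class name is illustrative):

  import java.io.ByteArrayOutputStream;

  import org.apache.hadoop.conf.Configuration;

  public class WriteXmlDeprecationSketch {
    public static void main(String[] args) throws Exception {
      Configuration conf = new Configuration();
      conf.setBoolean("old.config.yet.to.be.deprecated", true);
      Configuration.addDeprecation("old.config.yet.to.be.deprecated",
          new String[] { "new.conf.to.replace.deprecated.conf" });

      ByteArrayOutputStream out = new ByteArrayOutputStream();
      conf.writeXml(out);  // now runs handleDeprecation() over all properties
      String xml = out.toString();

      System.out.println(xml.contains("old.config.yet.to.be.deprecated"));    // true
      System.out.println(xml.contains("new.conf.to.replace.deprecated.conf")); // true
    }
  }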

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFileSystem.java?rev=1236936&r1=1236935&r2=1236936&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFileSystem.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFileSystem.java Sat Jan 28 00:21:07 2012
@@ -304,8 +304,9 @@ public abstract class ChecksumFileSystem
    */
   @Override
   public FSDataInputStream open(Path f, int bufferSize) throws IOException {
-    return new FSDataInputStream(
-        new ChecksumFSInputChecker(this, f, bufferSize));
+    return verifyChecksum
+      ? new FSDataInputStream(new ChecksumFSInputChecker(this, f, bufferSize))
+      : getRawFileSystem().open(f, bufferSize);
   }
 
   /** {@inheritDoc} */
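
In practice, setVerifyChecksum(false) now changes what open() returns: a checker-wrapped stream versus a raw stream (the new testStreamType case below asserts exactly this). A short sketch; the path and class name are illustrative:

  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.fs.FSDataInputStream;
  import org.apache.hadoop.fs.FileSystem;
  import org.apache.hadoop.fs.LocalFileSystem;
  import org.apache.hadoop.fs.Path;

  public class StreamTypeSketch {
    public static void main(String[] args) throws Exception {
      LocalFileSystem localFs = FileSystem.getLocal(new Configuration());
      Path testPath = new Path("/tmp/stream-type-demo");  // illustrative path
      localFs.create(testPath).close();

      localFs.setVerifyChecksum(true);
      FSDataInputStream in = localFs.open(testPath);
      // wrapped stream is a ChecksumFSInputChecker: reads are verified

      localFs.setVerifyChecksum(false);
      in = localFs.open(testPath);
      // wrapped stream now comes from getRawFileSystem(): no verification
    }
  }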

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java?rev=1236936&r1=1236935&r2=1236936&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java Sat Jan 28 00:21:07 2012
@@ -467,7 +467,7 @@ public class SequenceFile {
                Metadata metadata) throws IOException {
     return createWriter(FileContext.getFileContext(fs.getUri(), conf),
         conf, name, keyClass, valClass, compressionType, codec,
-        metadata, EnumSet.of(CreateFlag.CREATE),
+        metadata, EnumSet.of(CreateFlag.CREATE,CreateFlag.OVERWRITE),
         CreateOpts.bufferSize(bufferSize),
         createParent ? CreateOpts.createParent()
                      : CreateOpts.donotCreateParent(),
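
With CreateFlag.OVERWRITE added to the flag set, createWriter no longer fails when the target file already exists (see the new testCreateWriterOnExistingFile below). A sketch under that assumption; the path, key/value types, and class name are illustrative:

  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.fs.FileSystem;
  import org.apache.hadoop.fs.Path;
  import org.apache.hadoop.io.SequenceFile;
  import org.apache.hadoop.io.Text;

  public class OverwriteWriterSketch {
    public static void main(String[] args) throws Exception {
      Configuration conf = new Configuration();
      FileSystem fs = FileSystem.getLocal(conf);
      Path name = new Path("/tmp/existing-seqfile");  // illustrative path
      fs.create(name).close();                        // the file already exists

      // previously threw "already exists"; now overwrites the existing file
      SequenceFile.Writer writer = SequenceFile.createWriter(fs, conf, name,
          Text.class, Text.class, 512, (short) 1, 4096, false,
          SequenceFile.CompressionType.NONE, null, new SequenceFile.Metadata());
      writer.close();
    }
  }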

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryInvocationHandler.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryInvocationHandler.java?rev=1236936&r1=1236935&r2=1236936&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryInvocationHandler.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryInvocationHandler.java Sat Jan 28 00:21:07 2012
@@ -17,11 +17,10 @@
  */
 package org.apache.hadoop.io.retry;
 
-import java.io.Closeable;
 import java.io.IOException;
-import java.lang.reflect.InvocationHandler;
 import java.lang.reflect.InvocationTargetException;
 import java.lang.reflect.Method;
+import java.lang.reflect.Proxy;
 import java.util.Collections;
 import java.util.Map;
 
@@ -29,8 +28,10 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.io.retry.RetryPolicy.RetryAction;
 import org.apache.hadoop.util.ThreadUtil;
+import org.apache.hadoop.ipc.Client.ConnectionId;
+import org.apache.hadoop.ipc.RpcInvocationHandler;
 
-class RetryInvocationHandler implements InvocationHandler, Closeable {
+class RetryInvocationHandler implements RpcInvocationHandler {
   public static final Log LOG = LogFactory.getLog(RetryInvocationHandler.class);
   private FailoverProxyProvider proxyProvider;
 
@@ -160,4 +161,11 @@ class RetryInvocationHandler implements 
     proxyProvider.close();
   }
 
+  @Override //RpcInvocationHandler
+  public ConnectionId getConnectionId() {
+    RpcInvocationHandler inv = (RpcInvocationHandler) Proxy
+        .getInvocationHandler(currentProxy);
+    return inv.getConnectionId();
+  }
+
 }
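
One consequence of RetryInvocationHandler implementing RpcInvocationHandler: connection metadata is now reachable through retrying/failover proxies, e.g. via the RPC.getServerAddress() helper added later in this commit. A sketch; rpcProxy and TestProtocol are assumed, TestRPC-style names:

  // sketch: rpcProxy is assumed to come from RPC.getProxy(...), and
  // TestProtocol to be a TestRPC-style protocol interface
  Object retryProxy = RetryProxy.create(TestProtocol.class, rpcProxy,
      RetryPolicies.RETRY_FOREVER);
  // resolves through RetryInvocationHandler to the underlying invoker's id
  InetSocketAddress serverAddr = RPC.getServerAddress(retryProxy);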

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java?rev=1236936&r1=1236935&r2=1236936&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java Sat Jan 28 00:21:07 2012
@@ -18,11 +18,9 @@
 
 package org.apache.hadoop.ipc;
 
-import java.io.Closeable;
 import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.IOException;
-import java.lang.reflect.InvocationHandler;
 import java.lang.reflect.Method;
 import java.lang.reflect.Proxy;
 import java.net.InetSocketAddress;
@@ -37,6 +35,7 @@ import org.apache.hadoop.classification.
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.Writable;
+import org.apache.hadoop.ipc.Client.ConnectionId;
 import org.apache.hadoop.ipc.RPC.RpcInvoker;
 import org.apache.hadoop.ipc.RpcPayloadHeader.RpcKind;
 import org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProto;
@@ -51,7 +50,6 @@ import org.apache.hadoop.util.StringUtil
 import com.google.common.annotations.VisibleForTesting;
 import com.google.protobuf.BlockingService;
 import com.google.protobuf.Descriptors.MethodDescriptor;
-import com.google.protobuf.InvalidProtocolBufferException;
 import com.google.protobuf.Message;
 import com.google.protobuf.ServiceException;
 
@@ -80,8 +78,19 @@ public class ProtobufRpcEngine implement
         .getClassLoader(), new Class[] { protocol }, new Invoker(protocol,
         addr, ticket, conf, factory, rpcTimeout)), false);
   }
+  
+  @Override
+  public ProtocolProxy<ProtocolMetaInfoPB> getProtocolMetaInfoProxy(
+      ConnectionId connId, Configuration conf, SocketFactory factory)
+      throws IOException {
+    Class<ProtocolMetaInfoPB> protocol = ProtocolMetaInfoPB.class;
+    return new ProtocolProxy<ProtocolMetaInfoPB>(protocol,
+        (ProtocolMetaInfoPB) Proxy.newProxyInstance(protocol.getClassLoader(),
+            new Class[] { protocol }, new Invoker(protocol, connId, conf,
+                factory)), false);
+  }
 
-  private static class Invoker implements InvocationHandler, Closeable {
+  private static class Invoker implements RpcInvocationHandler {
     private final Map<String, Message> returnTypes = 
         new ConcurrentHashMap<String, Message>();
     private boolean isClosed = false;
@@ -93,12 +102,20 @@ public class ProtobufRpcEngine implement
     public Invoker(Class<?> protocol, InetSocketAddress addr,
         UserGroupInformation ticket, Configuration conf, SocketFactory factory,
         int rpcTimeout) throws IOException {
-      this.remoteId = Client.ConnectionId.getConnectionId(addr, protocol,
-          ticket, rpcTimeout, conf);
-      this.client = CLIENTS.getClient(conf, factory,
-          RpcResponseWritable.class);
-      this.clientProtocolVersion = RPC.getProtocolVersion(protocol);
+      this(protocol, Client.ConnectionId.getConnectionId(addr, protocol,
+          ticket, rpcTimeout, conf), conf, factory);
+    }
+    
+    /**
+     * This constructor takes a connectionId, instead of creating a new one.
+     */
+    public Invoker(Class<?> protocol, Client.ConnectionId connId,
+        Configuration conf, SocketFactory factory) {
+      this.remoteId = connId;
+      this.client = CLIENTS.getClient(conf, factory, RpcResponseWritable.class);
       this.protocolName = RPC.getProtocolName(protocol);
+      this.clientProtocolVersion = RPC
+          .getProtocolVersion(protocol);
     }
 
     private HadoopRpcRequestProto constructRpcRequest(Method method,
@@ -222,6 +239,11 @@ public class ProtobufRpcEngine implement
       returnTypes.put(method.getName(), prototype);
       return prototype;
     }
+
+    @Override //RpcInvocationHandler
+    public ConnectionId getConnectionId() {
+      return remoteId;
+    }
   }
 
   @Override
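
A rough sketch of how the new overload can be used to get a meta-info proxy over an existing connection (connId and conf are assumed to be in scope, e.g. recovered from a live proxy's RpcInvocationHandler):

  // sketch: connId/conf assumed in scope, e.g.
  //   ConnectionId connId = ((RpcInvocationHandler)
  //       Proxy.getInvocationHandler(someProxy)).getConnectionId();
  RpcEngine engine = new ProtobufRpcEngine();
  ProtocolProxy<ProtocolMetaInfoPB> metaProxy =
      engine.getProtocolMetaInfoProxy(connId, conf, SocketFactory.getDefault());
  ProtocolMetaInfoPB metaInfo = metaProxy.getProxy();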

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtocolSignature.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtocolSignature.java?rev=1236936&r1=1236935&r2=1236936&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtocolSignature.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtocolSignature.java Sat Jan 28 00:21:07 2012
@@ -183,7 +183,7 @@ public class ProtocolSignature implement
    * @return its signature and finger print
    */
   private static ProtocolSigFingerprint getSigFingerprint(
-      Class <? extends VersionedProtocol> protocol, long serverVersion) {
+      Class <?> protocol, long serverVersion) {
     String protocolName = RPC.getProtocolName(protocol);
     synchronized (PROTOCOL_FINGERPRINT_CACHE) {
       ProtocolSigFingerprint sig = PROTOCOL_FINGERPRINT_CACHE.get(protocolName);
@@ -221,6 +221,12 @@ public class ProtocolSignature implement
     return sig.signature;
   }
   
+  public static ProtocolSignature getProtocolSignature(String protocolName,
+      long version) throws ClassNotFoundException {
+    Class<?> protocol = Class.forName(protocolName);
+    return getSigFingerprint(protocol, version).signature;
+  }
+  
   /**
    * Get a server protocol's signature
    *
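
The new overload resolves the protocol by its class name, which is what lets the server side answer signature queries that carry only a protocol name string. A sketch; the protocol name and version here are illustrative:

  // resolves the class via Class.forName(); throws ClassNotFoundException
  // if the named protocol is not on the classpath
  ProtocolSignature sig = ProtocolSignature.getProtocolSignature(
      "org.apache.hadoop.ipc.ProtocolMetaInfoPB", 1L);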

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RPC.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RPC.java?rev=1236936&r1=1236935&r2=1236936&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RPC.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RPC.java Sat Jan 28 00:21:07 2012
@@ -41,6 +41,7 @@ import org.apache.commons.logging.*;
 
 import org.apache.hadoop.io.*;
 import org.apache.hadoop.ipc.RpcPayloadHeader.RpcKind;
+import org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolInfoService;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.SaslRpcServer;
 import org.apache.hadoop.security.UserGroupInformation;
@@ -49,6 +50,8 @@ import org.apache.hadoop.security.token.
 import org.apache.hadoop.conf.*;
 import org.apache.hadoop.util.ReflectionUtils;
 
+import com.google.protobuf.BlockingService;
+
 /** A simple RPC mechanism.
  *
  * A <i>protocol</i> is a Java interface.  All parameters and return types must
@@ -177,8 +180,8 @@ public class RPC {
   }
 
   // return the RpcEngine configured to handle a protocol
-  private static synchronized RpcEngine getProtocolEngine(Class<?> protocol,
-                                                          Configuration conf) {
+  static synchronized RpcEngine getProtocolEngine(Class<?> protocol,
+      Configuration conf) {
     RpcEngine engine = PROTOCOL_ENGINES.get(protocol);
     if (engine == null) {
       Class<?> impl = conf.getClass(ENGINE_PROP+"."+protocol.getName(),
@@ -522,7 +525,16 @@ public class RPC {
 
      return getProtocolProxy(protocol, clientVersion, addr, conf).getProxy();
    }
-
+  
+  /**
+   * Returns the server address for a given proxy.
+   */
+  public static InetSocketAddress getServerAddress(Object proxy) {
+    RpcInvocationHandler inv = (RpcInvocationHandler) Proxy
+        .getInvocationHandler(proxy);
+    return inv.getConnectionId().getAddress();
+  }
+   
   /**
    * Get a protocol proxy that contains a proxy connection to a remote server
    * and a set of methods that are supported by the server
@@ -817,6 +829,19 @@ public class RPC {
                      SecretManager<? extends TokenIdentifier> secretManager) throws IOException {
       super(bindAddress, port, paramClass, handlerCount, numReaders, queueSizePerHandler,
             conf, serverName, secretManager);
+      initProtocolMetaInfo(conf);
+    }
+    
+    private void initProtocolMetaInfo(Configuration conf)
+        throws IOException {
+      RPC.setProtocolEngine(conf, ProtocolMetaInfoPB.class,
+          ProtobufRpcEngine.class);
+      ProtocolMetaInfoServerSideTranslatorPB xlator = 
+          new ProtocolMetaInfoServerSideTranslatorPB(this);
+      BlockingService protocolInfoBlockingService = ProtocolInfoService
+          .newReflectiveBlockingService(xlator);
+      addProtocol(RpcKind.RPC_PROTOCOL_BUFFER, ProtocolMetaInfoPB.class,
+          protocolInfoBlockingService);
     }
     
     /**
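
Usage sketch for the new getServerAddress() helper, following TestRPC conventions (TestProtocol, addr, and conf are assumed to be in scope):

  // sketch: addr/conf assumed in scope; TestProtocol is a TestRPC-style interface
  TestProtocol proxy = RPC.getProxy(TestProtocol.class,
      TestProtocol.versionID, addr, conf);
  // returns the remote address recorded in the proxy's ConnectionId
  InetSocketAddress serverAddr = RPC.getServerAddress(proxy);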

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcEngine.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcEngine.java?rev=1236936&r1=1236935&r2=1236936&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcEngine.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcEngine.java Sat Jan 28 00:21:07 2012
@@ -26,6 +26,7 @@ import javax.net.SocketFactory;
 
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.ipc.Client.ConnectionId;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.token.SecretManager;
 import org.apache.hadoop.security.token.TokenIdentifier;
@@ -54,4 +55,16 @@ public interface RpcEngine {
                        SecretManager<? extends TokenIdentifier> secretManager
                        ) throws IOException;
 
+  /**
+   * Returns a proxy for ProtocolMetaInfoPB, which uses the given connection
+   * id.
+   * @param connId ConnectionId to be used for the proxy.
+   * @param conf Configuration.
+   * @param factory Socket factory.
+   * @return Proxy object.
+   * @throws IOException
+   */
+  ProtocolProxy<ProtocolMetaInfoPB> getProtocolMetaInfoProxy(
+      ConnectionId connId, Configuration conf, SocketFactory factory)
+      throws IOException;
 }

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WritableRpcEngine.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WritableRpcEngine.java?rev=1236936&r1=1236935&r2=1236936&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WritableRpcEngine.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WritableRpcEngine.java Sat Jan 28 00:21:07 2012
@@ -21,18 +21,17 @@ package org.apache.hadoop.ipc;
 import java.lang.reflect.Proxy;
 import java.lang.reflect.Method;
 import java.lang.reflect.Array;
-import java.lang.reflect.InvocationHandler;
 import java.lang.reflect.InvocationTargetException;
 
 import java.net.InetSocketAddress;
 import java.io.*;
-import java.io.Closeable;
 
 import javax.net.SocketFactory;
 
 import org.apache.commons.logging.*;
 
 import org.apache.hadoop.io.*;
+import org.apache.hadoop.ipc.Client.ConnectionId;
 import org.apache.hadoop.ipc.RPC.RpcInvoker;
 import org.apache.hadoop.ipc.RpcPayloadHeader.RpcKind;
 import org.apache.hadoop.ipc.VersionedProtocol;
@@ -202,7 +201,7 @@ public class WritableRpcEngine implement
 
   private static ClientCache CLIENTS=new ClientCache();
   
-  private static class Invoker implements InvocationHandler, Closeable {
+  private static class Invoker implements RpcInvocationHandler {
     private Client.ConnectionId remoteId;
     private Client client;
     private boolean isClosed = false;
@@ -239,6 +238,11 @@ public class WritableRpcEngine implement
         CLIENTS.stopClient(client);
       }
     }
+
+    @Override
+    public ConnectionId getConnectionId() {
+      return remoteId;
+    }
   }
   
   // for unit testing only
@@ -524,4 +528,11 @@ public class WritableRpcEngine implement
       }
     }
   }
+
+  @Override
+  public ProtocolProxy<ProtocolMetaInfoPB> getProtocolMetaInfoProxy(
+      ConnectionId connId, Configuration conf, SocketFactory factory)
+      throws IOException {
+    throw new UnsupportedOperationException("This proxy is not supported");
+  }
 }

Propchange: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/core/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Sat Jan 28 00:21:07 2012
@@ -1,3 +1,3 @@
-/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/core:1152502-1236328
+/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/core:1152502-1236935
 /hadoop/core/branches/branch-0.19/core/src/test/core:713112
 /hadoop/core/trunk/src/test/core:776175-785643,785929-786278

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestDeprecatedKeys.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestDeprecatedKeys.java?rev=1236936&r1=1236935&r2=1236936&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestDeprecatedKeys.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestDeprecatedKeys.java Sat Jan 28 00:21:07 2012
@@ -18,6 +18,8 @@
 
 package org.apache.hadoop.conf;
 
+import java.io.ByteArrayOutputStream;
+
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
 
@@ -32,4 +34,22 @@ public class TestDeprecatedKeys extends 
     String scriptFile = conf.get(CommonConfigurationKeys.NET_TOPOLOGY_SCRIPT_FILE_NAME_KEY);
     assertTrue(scriptFile.equals("xyz")) ;
   }
+  
+  //Tests reading / writing a conf file with deprecation after setting
+  public void testReadWriteWithDeprecatedKeys() throws Exception {
+    Configuration conf = new Configuration();
+    conf.setBoolean("old.config.yet.to.be.deprecated", true);
+    Configuration.addDeprecation("old.config.yet.to.be.deprecated", 
+	new String[]{"new.conf.to.replace.deprecated.conf"});
+    ByteArrayOutputStream out=new ByteArrayOutputStream();
+    String fileContents;
+    try {
+      conf.writeXml(out);
+      fileContents = out.toString();
+    } finally {
+      out.close();
+    }
+    assertTrue(fileContents.contains("old.config.yet.to.be.deprecated"));
+    assertTrue(fileContents.contains("new.conf.to.replace.deprecated.conf"));
+  }
 }

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestChecksumFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestChecksumFileSystem.java?rev=1236936&r1=1236935&r2=1236936&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestChecksumFileSystem.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestChecksumFileSystem.java Sat Jan 28 00:21:07 2012
@@ -22,12 +22,22 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import static org.apache.hadoop.fs.FileSystemTestHelper.*;
 import org.apache.hadoop.conf.Configuration;
-import junit.framework.TestCase;
+import org.junit.*;
+import static org.junit.Assert.*;
 
-public class TestChecksumFileSystem extends TestCase {
+public class TestChecksumFileSystem {
   static final String TEST_ROOT_DIR
     = System.getProperty("test.build.data","build/test/data/work-dir/localfs");
 
+  static LocalFileSystem localFs;
+
+  @Before
+  public void resetLocalFs() throws Exception {
+    localFs = FileSystem.getLocal(new Configuration());
+    localFs.setVerifyChecksum(true);
+  }
+
+  @Test
   public void testgetChecksumLength() throws Exception {
     assertEquals(8, ChecksumFileSystem.getChecksumLength(0L, 512));
     assertEquals(12, ChecksumFileSystem.getChecksumLength(1L, 512));
@@ -40,9 +50,8 @@ public class TestChecksumFileSystem exte
                  ChecksumFileSystem.getChecksumLength(10000000000000L, 10));    
   } 
   
+  @Test
   public void testVerifyChecksum() throws Exception {    
-    Configuration conf = new Configuration();
-    LocalFileSystem localFs = FileSystem.getLocal(conf);
     Path testPath = new Path(TEST_ROOT_DIR, "testPath");
     Path testPath11 = new Path(TEST_ROOT_DIR, "testPath11");
     FSDataOutputStream fout = localFs.create(testPath);
@@ -68,7 +77,7 @@ public class TestChecksumFileSystem exte
     
     //copying the wrong checksum file
     FileUtil.copy(localFs, localFs.getChecksumFile(testPath11), localFs, 
-        localFs.getChecksumFile(testPath),false,true,conf);
+        localFs.getChecksumFile(testPath),false,true,localFs.getConf());
     assertTrue("checksum exists", localFs.exists(localFs.getChecksumFile(testPath)));
     
     boolean errorRead = false;
@@ -80,20 +89,13 @@ public class TestChecksumFileSystem exte
     assertTrue("error reading", errorRead);
     
     //now setting verify false, the read should succeed
-    try {
-      localFs.setVerifyChecksum(false);
-      String str = readFile(localFs, testPath, 1024).toString();
-      assertTrue("read", "testing".equals(str));
-    } finally {
-      // reset for other tests
-      localFs.setVerifyChecksum(true);
-    }
-    
+    localFs.setVerifyChecksum(false);
+    String str = readFile(localFs, testPath, 1024).toString();
+    assertTrue("read", "testing".equals(str));
   }
 
+  @Test
   public void testMultiChunkFile() throws Exception {
-    Configuration conf = new Configuration();
-    LocalFileSystem localFs = FileSystem.getLocal(conf);
     Path testPath = new Path(TEST_ROOT_DIR, "testMultiChunk");
     FSDataOutputStream fout = localFs.create(testPath);
     for (int i = 0; i < 1000; i++) {
@@ -116,9 +118,8 @@ public class TestChecksumFileSystem exte
    * Test to ensure that if the checksum file is truncated, a
    * ChecksumException is thrown
    */
+  @Test
   public void testTruncatedChecksum() throws Exception { 
-    Configuration conf = new Configuration();
-    LocalFileSystem localFs = FileSystem.getLocal(conf);
     Path testPath = new Path(TEST_ROOT_DIR, "testtruncatedcrc");
     FSDataOutputStream fout = localFs.create(testPath);
     fout.write("testing truncation".getBytes());
@@ -146,14 +147,60 @@ public class TestChecksumFileSystem exte
     }
 
     // telling it not to verify checksums, should avoid issue.
+    localFs.setVerifyChecksum(false);
+    String str = readFile(localFs, testPath, 1024).toString();
+    assertTrue("read", "testing truncation".equals(str));
+  }
+  
+  @Test
+  public void testStreamType() throws Exception {
+    Path testPath = new Path(TEST_ROOT_DIR, "testStreamType");
+    localFs.create(testPath).close();    
+    FSDataInputStream in = null;
+    
+    localFs.setVerifyChecksum(true);
+    in = localFs.open(testPath);
+    assertTrue("stream is input checker",
+        in.getWrappedStream() instanceof FSInputChecker);
+    
+    localFs.setVerifyChecksum(false);
+    in = localFs.open(testPath);
+    assertFalse("stream is not input checker",
+        in.getWrappedStream() instanceof FSInputChecker);
+  }
+  
+  @Test
+  public void testCorruptedChecksum() throws Exception {
+    Path testPath = new Path(TEST_ROOT_DIR, "testCorruptChecksum");
+    Path checksumPath = localFs.getChecksumFile(testPath);
+
+    // write a file to generate checksum
+    FSDataOutputStream out = localFs.create(testPath, true);
+    out.write("testing 1 2 3".getBytes());
+    out.close();
+    assertTrue(localFs.exists(checksumPath));
+    FileStatus stat = localFs.getFileStatus(checksumPath);
+    
+    // alter file directly so checksum is invalid
+    out = localFs.getRawFileSystem().create(testPath, true);
+    out.write("testing stale checksum".getBytes());
+    out.close();
+    assertTrue(localFs.exists(checksumPath));
+    // checksum didn't change on disk
+    assertEquals(stat, localFs.getFileStatus(checksumPath));
+
+    Exception e = null;
     try {
-      localFs.setVerifyChecksum(false);
-      String str = readFile(localFs, testPath, 1024).toString();
-      assertTrue("read", "testing truncation".equals(str));
-    } finally {
-      // reset for other tests
       localFs.setVerifyChecksum(true);
+      readFile(localFs, testPath, 1024);
+    } catch (ChecksumException ce) {
+      e = ce;
+    } finally {
+      assertNotNull("got checksum error", e);
     }
 
+    localFs.setVerifyChecksum(false);
+    String str = readFile(localFs, testPath, 1024);
+    assertEquals("testing stale checksum", str);
   }
 }

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSequenceFile.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSequenceFile.java?rev=1236936&r1=1236935&r2=1236936&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSequenceFile.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSequenceFile.java Sat Jan 28 00:21:07 2012
@@ -517,6 +517,23 @@ public class TestSequenceFile extends Te
     assertTrue("InputStream for " + path + " should have been closed.", openedFile[0].isClosed());
   }
 
+  /**
+   * Test that createWriter succeeds on a file that
+   * already exists.
+   * @throws IOException
+   */
+  public void testCreateWriterOnExistingFile() throws IOException {
+    Configuration conf = new Configuration();
+    FileSystem fs = FileSystem.getLocal(conf);
+    Path name = new Path(new Path(System.getProperty("test.build.data","."),
+        "createWriterOnExistingFile") , "file");
+
+    fs.create(name);
+    SequenceFile.createWriter(fs, conf, name, RandomDatum.class,
+        RandomDatum.class, 512, (short) 1, 4096, false,
+        CompressionType.NONE, null, new Metadata());
+  }
+
   public void testRecursiveSeqFileCreate() throws IOException {
     Configuration conf = new Configuration();
     FileSystem fs = FileSystem.getLocal(conf);

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPC.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPC.java?rev=1236936&r1=1236935&r2=1236936&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPC.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPC.java Sat Jan 28 00:21:07 2012
@@ -39,6 +39,7 @@ import org.apache.hadoop.io.UTF8;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.retry.RetryPolicies;
 import org.apache.hadoop.io.retry.RetryProxy;
+import org.apache.hadoop.ipc.Client.ConnectionId;
 import org.apache.hadoop.metrics2.MetricsRecordBuilder;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.authorize.AuthorizationException;
@@ -259,7 +260,13 @@ public class TestRPC {
         SecretManager<? extends TokenIdentifier> secretManager) throws IOException {
       return null;
     }
-    
+
+    @Override
+    public ProtocolProxy<ProtocolMetaInfoPB> getProtocolMetaInfoProxy(
+        ConnectionId connId, Configuration conf, SocketFactory factory)
+        throws IOException {
+      throw new UnsupportedOperationException("This proxy is not supported");
+    }
   }
 
   /**

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPCCompatibility.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPCCompatibility.java?rev=1236936&r1=1236935&r2=1236936&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPCCompatibility.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPCCompatibility.java Sat Jan 28 00:21:07 2012
@@ -32,6 +32,9 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.ipc.RpcPayloadHeader.RpcKind;
+import org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureRequestProto;
+import org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureResponseProto;
+import org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto;
 import org.apache.hadoop.net.NetUtils;
 import org.junit.After;
 import org.junit.Test;
@@ -302,4 +305,72 @@ System.out.println("echo int is NOT supp
           ex.getMessage().contains("VersionMismatch"));
     }
   }
+  
+  @Test
+  public void testIsMethodSupported() throws IOException {
+    server = RPC.getServer(TestProtocol2.class, new TestImpl2(), ADDRESS, 0, 2,
+        false, conf, null);
+    server.start();
+    addr = NetUtils.getConnectAddress(server);
+
+    TestProtocol2 proxy = RPC.getProxy(TestProtocol2.class,
+        TestProtocol2.versionID, addr, conf);
+    boolean supported = RpcClientUtil.isMethodSupported(proxy,
+        TestProtocol2.class, RpcKind.RPC_WRITABLE,
+        RPC.getProtocolVersion(TestProtocol2.class), "echo");
+    Assert.assertTrue(supported);
+    supported = RpcClientUtil.isMethodSupported(proxy,
+        TestProtocol2.class, RpcKind.RPC_PROTOCOL_BUFFER,
+        RPC.getProtocolVersion(TestProtocol2.class), "echo");
+    Assert.assertFalse(supported);
+  }
+
+  /**
+   * Verify that ProtocolMetaInfoServerSideTranslatorPB correctly looks up
+   * the server registry to extract protocol signatures and versions.
+   */
+  @Test
+  public void testProtocolMetaInfoSSTranslatorPB() throws Exception {
+    TestImpl1 impl = new TestImpl1();
+    server = RPC.getServer(TestProtocol1.class, impl, ADDRESS, 0, 2, false,
+        conf, null);
+    server.addProtocol(RpcKind.RPC_WRITABLE, TestProtocol0.class, impl);
+    server.start();
+
+    ProtocolMetaInfoServerSideTranslatorPB xlator = 
+        new ProtocolMetaInfoServerSideTranslatorPB(server);
+
+    GetProtocolSignatureResponseProto resp = xlator.getProtocolSignature(
+        null,
+        createGetProtocolSigRequestProto(TestProtocol1.class,
+            RpcKind.RPC_PROTOCOL_BUFFER));
+    //No signatures should be found
+    Assert.assertEquals(0, resp.getProtocolSignatureCount());
+    resp = xlator.getProtocolSignature(
+        null,
+        createGetProtocolSigRequestProto(TestProtocol1.class,
+            RpcKind.RPC_WRITABLE));
+    Assert.assertEquals(1, resp.getProtocolSignatureCount());
+    ProtocolSignatureProto sig = resp.getProtocolSignatureList().get(0);
+    Assert.assertEquals(TestProtocol1.versionID, sig.getVersion());
+    boolean found = false;
+    int expected = ProtocolSignature.getFingerprint(TestProtocol1.class
+        .getMethod("echo", String.class));
+    for (int m : sig.getMethodsList()) {
+      if (expected == m) {
+        found = true;
+        break;
+      }
+    }
+    Assert.assertTrue(found);
+  }
+  
+  private GetProtocolSignatureRequestProto createGetProtocolSigRequestProto(
+      Class<?> protocol, RpcKind rpcKind) {
+    GetProtocolSignatureRequestProto.Builder builder = 
+        GetProtocolSignatureRequestProto.newBuilder();
+    builder.setProtocol(protocol.getName());
+    builder.setRpcKind(rpcKind.toString());
+    return builder.build();
+  }
 }
\ No newline at end of file