Posted to common-commits@hadoop.apache.org by sz...@apache.org on 2012/04/24 21:05:16 UTC

svn commit: r1329947 - in /hadoop/common/branches/HDFS-3092/hadoop-common-project: hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/ hadoop-common/ hadoop-common/dev-support/ hadoop-common/src/main/docs/ hadoop-common/src/main...

Author: szetszwo
Date: Tue Apr 24 19:05:09 2012
New Revision: 1329947

URL: http://svn.apache.org/viewvc?rev=1329947&view=rev
Log:
Merge r1327258 through r1329943 from trunk.

Added:
    hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/proto/RpcPayloadHeader.proto
      - copied unchanged from r1329943, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/proto/RpcPayloadHeader.proto
    hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/resources/META-INF/services/org.apache.hadoop.io.compress.CompressionCodec
      - copied unchanged from r1329943, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/resources/META-INF/services/org.apache.hadoop.io.compress.CompressionCodec
Removed:
    hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcPayloadHeader.java
Modified:
    hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/KerberosAuthenticationHandler.java
    hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/PseudoAuthenticationHandler.java
    hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/CHANGES.txt   (contents, props changed)
    hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml
    hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/docs/   (props changed)
    hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/   (props changed)
    hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodecFactory.java
    hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
    hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java
    hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtocolMetaInfoServerSideTranslatorPB.java
    hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RPC.java
    hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcClientUtil.java
    hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
    hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WritableRpcEngine.java
    hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java
    hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java
    hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ProtoUtil.java
    hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java
    hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/packages/templates/conf/mapred-site.xml
    hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml
    hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/site/apt/DeprecatedProperties.apt.vm
    hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/core/   (props changed)
    hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodec.java
    hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodecFactory.java
    hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileSeqFileComparison.java
    hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPC.java
    hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPCServerResponder.java
    hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestMultipleProtocolServer.java
    hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestProtoBufRpc.java
    hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPCCompatibility.java
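
The META-INF/services file added above is a standard Java ServiceLoader
provider-configuration file: one fully qualified CompressionCodec
implementation class per line (a sample of the format is sketched below;
the exact class list in the committed file may differ):

    # META-INF/services/org.apache.hadoop.io.compress.CompressionCodec
    org.apache.hadoop.io.compress.BZip2Codec
    org.apache.hadoop.io.compress.DefaultCodec
    org.apache.hadoop.io.compress.GzipCodec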

Modified: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/KerberosAuthenticationHandler.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/KerberosAuthenticationHandler.java?rev=1329947&r1=1329946&r2=1329947&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/KerberosAuthenticationHandler.java (original)
+++ hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/KerberosAuthenticationHandler.java Tue Apr 24 19:05:09 2012
@@ -288,7 +288,7 @@ public class KerberosAuthenticationHandl
                 String clientPrincipal = gssContext.getSrcName().toString();
                 KerberosName kerberosName = new KerberosName(clientPrincipal);
                 String userName = kerberosName.getShortName();
-                token = new AuthenticationToken(userName, clientPrincipal, TYPE);
+                token = new AuthenticationToken(userName, clientPrincipal, getType());
                 response.setStatus(HttpServletResponse.SC_OK);
                 LOG.trace("SPNEGO completed for principal [{}]", clientPrincipal);
               }

Modified: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/PseudoAuthenticationHandler.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/PseudoAuthenticationHandler.java?rev=1329947&r1=1329946&r2=1329947&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/PseudoAuthenticationHandler.java (original)
+++ hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/PseudoAuthenticationHandler.java Tue Apr 24 19:05:09 2012
@@ -126,7 +126,7 @@ public class PseudoAuthenticationHandler
         throw new AuthenticationException("Anonymous requests are disallowed");
       }
     } else {
-      token = new AuthenticationToken(userName, userName, TYPE);
+      token = new AuthenticationToken(userName, userName, getType());
     }
     return token;
   }
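
The two handler changes above (HADOOP-8309 in the CHANGES.txt diff below)
swap the TYPE constant for the getType() accessor when building the
AuthenticationToken, so a subclass that overrides the handler type issues
correctly typed tokens. A minimal sketch with a hypothetical subclass:

    // Hypothetical subclass that overrides the handler type.
    public class CustomKerberosHandler extends KerberosAuthenticationHandler {
      @Override
      public String getType() {
        return "custom-kerberos";
      }
    }
    // With this fix, authenticate() issues
    //   new AuthenticationToken(userName, clientPrincipal, getType())
    // so a token from CustomKerberosHandler carries "custom-kerberos"
    // rather than the parent's hardcoded TYPE, and validates against the
    // handler that actually issued it.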

Modified: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1329947&r1=1329946&r2=1329947&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/CHANGES.txt (original)
+++ hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/CHANGES.txt Tue Apr 24 19:05:09 2012
@@ -61,7 +61,9 @@ Trunk (unreleased changes)
     HADOOP-8147. test-patch should run tests with -fn to avoid masking test
     failures (Robert Evans via tgraves)
 
-    HADOOP-8117. Upgrade test build to Surefire 2.12 (todd)
+    HADOOP-8290. Remove remaining references to hadoop.native.lib (harsh)
+
+    HADOOP-8285 Use ProtoBuf for RpcPayLoadHeader (sanjay radia)
 
   BUG FIXES
 
@@ -263,6 +265,10 @@ Release 2.0.0 - UNRELEASED 
     HADOOP-8280. Move VersionUtil/TestVersionUtil and GenericTestUtils from
     HDFS into Common. (Ahmed Radwan via atm)
 
+    HADOOP-8117. Upgrade test build to Surefire 2.12 (todd)
+
+    HADOOP-8152. Expand public APIs for security library classes. (atm via eli)
+
   OPTIMIZATIONS
 
   BUG FIXES
@@ -358,6 +364,15 @@ Release 2.0.0 - UNRELEASED 
     HADOOP-8282. start-all.sh refers incorrectly start-dfs.sh
     existence for starting start-yarn.sh. (Devaraj K via eli)
 
+    HADOOP-7350. Use ServiceLoader to discover compression codec classes.
+    (tomwhite)
+
+    HADOOP-8284. clover integration broken, also mapreduce poms are pulling
+    in clover as a dependency. (phunt via tucu)
+
+    HADOOP-8309. Pseudo & Kerberos AuthenticationHandler should use 
+    getType() to create token (tucu)
+
   BREAKDOWN OF HADOOP-7454 SUBTASKS
 
     HADOOP-7455. HA: Introduce HA Service Protocol Interface. (suresh)
@@ -421,6 +436,9 @@ Release 0.23.3 - UNRELEASED
     HADOOP-8108. Move method getHostPortString() from NameNode to NetUtils.
     (Brandon Li via jitendra)
 
+    HADOOP-8288. Remove references of mapred.child.ulimit etc. since they are
+    not being used any more (Ravi Prakash via bobby)
+
   OPTIMIZATIONS
 
   BUG FIXES

Propchange: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/CHANGES.txt
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt:r1327258-1329943

Modified: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml?rev=1329947&r1=1329946&r2=1329947&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml (original)
+++ hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml Tue Apr 24 19:05:09 2012
@@ -282,8 +282,13 @@
       <!-- protobuf generated code -->
       <Class name="~org\.apache\.hadoop\.ipc\.protobuf\.IpcConnectionContextProtos.*"/>
     </Match>
+        <Match>
+      <!-- protobuf generated code -->
+      <Class name="~org\.apache\.hadoop\.ipc\.protobuf\.RpcPayloadHeaderProtos.*"/>
+    </Match>
     <Match>
       <!-- protobuf generated code -->
       <Class name="~org\.apache\.hadoop\.ha\.proto\.HAServiceProtocolProtos.*"/>
     </Match>
+
  </FindBugsFilter>

Propchange: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/docs/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/docs:r1327258-1329943

Propchange: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java:r1327258-1329943

Modified: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodecFactory.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodecFactory.java?rev=1329947&r1=1329946&r2=1329947&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodecFactory.java (original)
+++ hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodecFactory.java Tue Apr 24 19:05:09 2012
@@ -36,6 +36,9 @@ public class CompressionCodecFactory {
 
   public static final Log LOG =
     LogFactory.getLog(CompressionCodecFactory.class.getName());
+  
+  private static final ServiceLoader<CompressionCodec> CODEC_PROVIDERS =
+    ServiceLoader.load(CompressionCodec.class);
 
   /**
    * A map from the reversed filename suffixes to the codecs.
@@ -95,16 +98,23 @@ public class CompressionCodecFactory {
   }
 
   /**
-   * Get the list of codecs listed in the configuration
+   * Get the list of codecs discovered via a Java ServiceLoader, or
+   * listed in the configuration. Codecs specified in configuration come
+   * later in the returned list, and are considered to override those
+   * from the ServiceLoader.
    * @param conf the configuration to look in
-   * @return a list of the Configuration classes or null if the attribute
-   *         was not set
+   * @return a list of the {@link CompressionCodec} classes
    */
   public static List<Class<? extends CompressionCodec>> getCodecClasses(Configuration conf) {
+    List<Class<? extends CompressionCodec>> result
+      = new ArrayList<Class<? extends CompressionCodec>>();
+    // Add codec classes discovered via service loading
+    for (CompressionCodec codec : CODEC_PROVIDERS) {
+      result.add(codec.getClass());
+    }
+    // Add codec classes from configuration
     String codecsString = conf.get("io.compression.codecs");
     if (codecsString != null) {
-      List<Class<? extends CompressionCodec>> result
-        = new ArrayList<Class<? extends CompressionCodec>>();
       StringTokenizer codecSplit = new StringTokenizer(codecsString, ",");
       while (codecSplit.hasMoreElements()) {
         String codecSubstring = codecSplit.nextToken();
@@ -123,14 +133,14 @@ public class CompressionCodecFactory {
           }
         }
       }
-      return result;
-    } else {
-      return null;
     }
+    return result;
   }
   
   /**
-   * Sets a list of codec classes in the configuration.
+   * Sets a list of codec classes in the configuration. In addition to any
+   * classes specified using this method, {@link CompressionCodec} classes on
+   * the classpath are discovered using a Java ServiceLoader.
    * @param conf the configuration to modify
    * @param classes the list of classes to set
    */
@@ -151,21 +161,19 @@ public class CompressionCodecFactory {
   
   /**
    * Find the codecs specified in the config value io.compression.codecs 
-   * and register them. Defaults to gzip and zip.
+   * and register them. Defaults to gzip and deflate.
    */
   public CompressionCodecFactory(Configuration conf) {
     codecs = new TreeMap<String, CompressionCodec>();
     codecsByClassName = new HashMap<String, CompressionCodec>();
     codecsByName = new HashMap<String, CompressionCodec>();
     List<Class<? extends CompressionCodec>> codecClasses = getCodecClasses(conf);
-    if (codecClasses == null) {
+    if (codecClasses == null || codecClasses.isEmpty()) {
       addCodec(new GzipCodec());
       addCodec(new DefaultCodec());      
     } else {
-      Iterator<Class<? extends CompressionCodec>> itr = codecClasses.iterator();
-      while (itr.hasNext()) {
-        CompressionCodec codec = ReflectionUtils.newInstance(itr.next(), conf);
-        addCodec(codec);     
+      for (Class<? extends CompressionCodec> codecClass : codecClasses) {
+        addCodec(ReflectionUtils.newInstance(codecClass, conf));
       }
     }
   }
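
The factory rewrite above makes codec discovery two-phase: classes found
via ServiceLoader come first, and classes named in io.compression.codecs
are appended afterwards so that configuration overrides discovery. A
minimal standalone sketch of the ServiceLoader half, using only the JDK
API and the class names from this diff:

    import java.util.ServiceLoader;

    import org.apache.hadoop.io.compress.CompressionCodec;

    public class CodecDiscovery {
      public static void main(String[] args) {
        // Scans every META-INF/services/org.apache.hadoop.io.compress
        // .CompressionCodec file on the classpath and instantiates each
        // implementation class it lists.
        for (CompressionCodec codec :
            ServiceLoader.load(CompressionCodec.class)) {
          System.out.println("discovered: " + codec.getClass().getName());
        }
      }
    }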

Modified: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java?rev=1329947&r1=1329946&r2=1329947&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java (original)
+++ hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java Tue Apr 24 19:05:09 2012
@@ -50,8 +50,9 @@ import org.apache.hadoop.classification.
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
-import org.apache.hadoop.ipc.RpcPayloadHeader.*;
 import org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.IpcConnectionContextProto;
+import org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcPayloadHeaderProto;
+import org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcPayloadOperationProto;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableUtils;
@@ -163,10 +164,10 @@ public class Client {
     final Writable rpcRequest;  // the serialized rpc request - RpcPayload
     Writable rpcResponse;       // null if rpc has error
     IOException error;          // exception, null if success
-    final RpcKind rpcKind;      // Rpc EngineKind
+    final RPC.RpcKind rpcKind;      // Rpc EngineKind
     boolean done;               // true when call is done
 
-    protected Call(RpcKind rpcKind, Writable param) {
+    protected Call(RPC.RpcKind rpcKind, Writable param) {
       this.rpcKind = rpcKind;
       this.rpcRequest = param;
       synchronized (Client.this) {
@@ -613,7 +614,7 @@ public class Client {
             this.in = new DataInputStream(new BufferedInputStream(inStream));
           }
           this.out = new DataOutputStream(new BufferedOutputStream(outStream));
-          writeHeader();
+          writeConnectionContext();
 
           // update last activity time
           touch();
@@ -704,16 +705,17 @@ public class Client {
       out.flush();
     }
     
-    /* Write the protocol header for each connection
+    /* Write the connection context header for each connection
      * Out is not synchronized because only the first thread does this.
      */
-    private void writeHeader() throws IOException {
+    private void writeConnectionContext() throws IOException {
       // Write out the ConnectionHeader
       DataOutputBuffer buf = new DataOutputBuffer();
       connectionContext.writeTo(buf);
       
       // Write out the payload length
       int bufLen = buf.getLength();
+
       out.writeInt(bufLen);
       out.write(buf.getData(), 0, bufLen);
     }
@@ -806,21 +808,22 @@ public class Client {
           if (LOG.isDebugEnabled())
             LOG.debug(getName() + " sending #" + call.id);
           
-          //for serializing the
-          //data to be written
+          // Serializing the data to be written.
+          // Format:
+          // 0) Length of rest below (1 + 2)
+          // 1) PayloadHeader  - is serialized Delimited hence contains length
+          // 2) the Payload - the RpcRequest
+          //
           d = new DataOutputBuffer();
-          d.writeInt(0); // placeholder for data length
-          RpcPayloadHeader header = new RpcPayloadHeader(
-              call.rpcKind, RpcPayloadOperation.RPC_FINAL_PAYLOAD, call.id);
-          header.write(d);
+          RpcPayloadHeaderProto header = ProtoUtil.makeRpcPayloadHeader(
+             call.rpcKind, RpcPayloadOperationProto.RPC_FINAL_PAYLOAD, call.id);
+          header.writeDelimitedTo(d);
           call.rpcRequest.write(d);
           byte[] data = d.getData();
-          int dataLength = d.getLength() - 4;
-          data[0] = (byte)((dataLength >>> 24) & 0xff);
-          data[1] = (byte)((dataLength >>> 16) & 0xff);
-          data[2] = (byte)((dataLength >>> 8) & 0xff);
-          data[3] = (byte)(dataLength & 0xff);
-          out.write(data, 0, dataLength + 4);//write the data
+   
+          int totalLength = d.getLength();
+          out.writeInt(totalLength); // Total Length
+          out.write(data, 0, totalLength);//PayloadHeader + RpcRequest
           out.flush();
         }
       } catch(IOException e) {
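
The rewritten send path above drops the old trick of reserving four bytes
and patching the length in afterwards; instead it buffers the delimited
header plus the request, then writes a single length prefix. A minimal
sketch of the same framing with plain JDK streams (header is a
RpcPayloadHeaderProto as generated from the new .proto file; the other
names are placeholders):

    import java.io.ByteArrayOutputStream;
    import java.io.DataOutputStream;
    import java.io.IOException;

    import org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcPayloadHeaderProto;

    // frame = | int32 total length | varint-delimited header | request |
    static void sendFrame(DataOutputStream out, RpcPayloadHeaderProto header,
        byte[] requestBytes) throws IOException {
      ByteArrayOutputStream buf = new ByteArrayOutputStream();
      header.writeDelimitedTo(buf); // writes varint length, then the header
      buf.write(requestBytes);      // serialized RpcRequest follows directly
      out.writeInt(buf.size());     // one total-length prefix for the frame
      buf.writeTo(out);
      out.flush();
    }
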
@@ -937,7 +940,7 @@ public class Client {
     private int index;
     
     public ParallelCall(Writable param, ParallelResults results, int index) {
-      super(RpcKind.RPC_WRITABLE, param);
+      super(RPC.RpcKind.RPC_WRITABLE, param);
       this.results = results;
       this.index = index;
     }
@@ -1022,22 +1025,22 @@ public class Client {
   }
 
   /**
-   * Same as {@link #call(RpcPayloadHeader.RpcKind, Writable, ConnectionId)}
+   * Same as {@link #call(RPC.RpcKind, Writable, ConnectionId)}
    *  for RPC_BUILTIN
    */
   public Writable call(Writable param, InetSocketAddress address)
   throws InterruptedException, IOException {
-    return call(RpcKind.RPC_BUILTIN, param, address);
+    return call(RPC.RpcKind.RPC_BUILTIN, param, address);
     
   }
   /** Make a call, passing <code>param</code>, to the IPC server running at
    * <code>address</code>, returning the value.  Throws exceptions if there are
    * network problems or if the remote code threw an exception.
-   * @deprecated Use {@link #call(RpcPayloadHeader.RpcKind, Writable,
+   * @deprecated Use {@link #call(RPC.RpcKind, Writable,
    *  ConnectionId)} instead 
    */
   @Deprecated
-  public Writable call(RpcKind rpcKind, Writable param, InetSocketAddress address)
+  public Writable call(RPC.RpcKind rpcKind, Writable param, InetSocketAddress address)
   throws InterruptedException, IOException {
       return call(rpcKind, param, address, null);
   }
@@ -1047,11 +1050,11 @@ public class Client {
    * the value.  
    * Throws exceptions if there are network problems or if the remote code 
    * threw an exception.
-   * @deprecated Use {@link #call(RpcPayloadHeader.RpcKind, Writable, 
+   * @deprecated Use {@link #call(RPC.RpcKind, Writable, 
    * ConnectionId)} instead 
    */
   @Deprecated
-  public Writable call(RpcKind rpcKind, Writable param, InetSocketAddress addr, 
+  public Writable call(RPC.RpcKind rpcKind, Writable param, InetSocketAddress addr, 
       UserGroupInformation ticket)  
       throws InterruptedException, IOException {
     ConnectionId remoteId = ConnectionId.getConnectionId(addr, null, ticket, 0,
@@ -1065,11 +1068,11 @@ public class Client {
    * timeout, returning the value.  
    * Throws exceptions if there are network problems or if the remote code 
    * threw an exception. 
-   * @deprecated Use {@link #call(RpcPayloadHeader.RpcKind, Writable,
+   * @deprecated Use {@link #call(RPC.RpcKind, Writable,
    *  ConnectionId)} instead 
    */
   @Deprecated
-  public Writable call(RpcKind rpcKind, Writable param, InetSocketAddress addr, 
+  public Writable call(RPC.RpcKind rpcKind, Writable param, InetSocketAddress addr, 
                        Class<?> protocol, UserGroupInformation ticket,
                        int rpcTimeout)  
                        throws InterruptedException, IOException {
@@ -1080,7 +1083,7 @@ public class Client {
 
   
   /**
-   * Same as {@link #call(RpcPayloadHeader.RpcKind, Writable, InetSocketAddress, 
+   * Same as {@link #call(RPC.RpcKind, Writable, InetSocketAddress, 
    * Class, UserGroupInformation, int, Configuration)}
    * except that rpcKind is writable.
    */
@@ -1090,7 +1093,7 @@ public class Client {
       throws InterruptedException, IOException {
         ConnectionId remoteId = ConnectionId.getConnectionId(addr, protocol,
         ticket, rpcTimeout, conf);
-    return call(RpcKind.RPC_BUILTIN, param, remoteId);
+    return call(RPC.RpcKind.RPC_BUILTIN, param, remoteId);
   }
   
   /**
@@ -1101,7 +1104,7 @@ public class Client {
    * value. Throws exceptions if there are network problems or if the remote
    * code threw an exception.
    */
-  public Writable call(RpcKind rpcKind, Writable param, InetSocketAddress addr, 
+  public Writable call(RPC.RpcKind rpcKind, Writable param, InetSocketAddress addr, 
                        Class<?> protocol, UserGroupInformation ticket,
                        int rpcTimeout, Configuration conf)  
                        throws InterruptedException, IOException {
@@ -1111,12 +1114,12 @@ public class Client {
   }
   
   /**
-   * Same as {link {@link #call(RpcPayloadHeader.RpcKind, Writable, ConnectionId)}
+   * Same as {link {@link #call(RPC.RpcKind, Writable, ConnectionId)}
    * except the rpcKind is RPC_BUILTIN
    */
   public Writable call(Writable param, ConnectionId remoteId)  
       throws InterruptedException, IOException {
-     return call(RpcKind.RPC_BUILTIN, param, remoteId);
+     return call(RPC.RpcKind.RPC_BUILTIN, param, remoteId);
   }
   
   /** 
@@ -1130,7 +1133,7 @@ public class Client {
    * Throws exceptions if there are network problems or if the remote code 
    * threw an exception.
    */
-  public Writable call(RpcKind rpcKind, Writable rpcRequest,
+  public Writable call(RPC.RpcKind rpcKind, Writable rpcRequest,
       ConnectionId remoteId) throws InterruptedException, IOException {
     Call call = new Call(rpcKind, rpcRequest);
     Connection connection = getConnection(remoteId, call);

Modified: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java?rev=1329947&r1=1329946&r2=1329947&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java (original)
+++ hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java Tue Apr 24 19:05:09 2012
@@ -38,7 +38,6 @@ import org.apache.hadoop.io.DataOutputOu
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.ipc.Client.ConnectionId;
 import org.apache.hadoop.ipc.RPC.RpcInvoker;
-import org.apache.hadoop.ipc.RpcPayloadHeader.RpcKind;
 
 import org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcRequestProto;
 import org.apache.hadoop.security.UserGroupInformation;
@@ -61,7 +60,7 @@ public class ProtobufRpcEngine implement
   
   static { // Register the rpcRequest deserializer for WritableRpcEngine 
     org.apache.hadoop.ipc.Server.registerProtocolEngine(
-        RpcKind.RPC_PROTOCOL_BUFFER, RpcRequestWritable.class,
+        RPC.RpcKind.RPC_PROTOCOL_BUFFER, RpcRequestWritable.class,
         new Server.ProtoBufRpcInvoker());
   }
 
@@ -182,7 +181,7 @@ public class ProtobufRpcEngine implement
       HadoopRpcRequestProto rpcRequest = constructRpcRequest(method, args);
       RpcResponseWritable val = null;
       try {
-        val = (RpcResponseWritable) client.call(RpcKind.RPC_PROTOCOL_BUFFER,
+        val = (RpcResponseWritable) client.call(RPC.RpcKind.RPC_PROTOCOL_BUFFER,
             new RpcRequestWritable(rpcRequest), remoteId);
       } catch (Throwable e) {
         throw new ServiceException(e);
@@ -351,7 +350,7 @@ public class ProtobufRpcEngine implement
           numReaders, queueSizePerHandler, conf, classNameBase(protocolImpl
               .getClass().getName()), secretManager, portRangeConfig);
       this.verbose = verbose;  
-      registerProtocolAndImpl(RpcKind.RPC_PROTOCOL_BUFFER, protocolClass,
+      registerProtocolAndImpl(RPC.RpcKind.RPC_PROTOCOL_BUFFER, protocolClass,
           protocolImpl);
     }
     
@@ -363,10 +362,10 @@ public class ProtobufRpcEngine implement
           String protoName, long version) throws IOException {
         ProtoNameVer pv = new ProtoNameVer(protoName, version);
         ProtoClassProtoImpl impl = 
-            server.getProtocolImplMap(RpcKind.RPC_PROTOCOL_BUFFER).get(pv);
+            server.getProtocolImplMap(RPC.RpcKind.RPC_PROTOCOL_BUFFER).get(pv);
         if (impl == null) { // no match for Protocol AND Version
           VerProtocolImpl highest = 
-              server.getHighestSupportedProtocol(RpcKind.RPC_PROTOCOL_BUFFER, 
+              server.getHighestSupportedProtocol(RPC.RpcKind.RPC_PROTOCOL_BUFFER, 
                   protoName);
           if (highest == null) {
             throw new IOException("Unknown protocol: " + protoName);

Modified: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtocolMetaInfoServerSideTranslatorPB.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtocolMetaInfoServerSideTranslatorPB.java?rev=1329947&r1=1329946&r2=1329947&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtocolMetaInfoServerSideTranslatorPB.java (original)
+++ hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtocolMetaInfoServerSideTranslatorPB.java Tue Apr 24 19:05:09 2012
@@ -18,7 +18,6 @@
 package org.apache.hadoop.ipc;
 
 import org.apache.hadoop.ipc.RPC.Server.VerProtocolImpl;
-import org.apache.hadoop.ipc.RpcPayloadHeader.RpcKind;
 import org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureRequestProto;
 import org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureResponseProto;
 import org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolVersionsRequestProto;
@@ -49,7 +48,7 @@ public class ProtocolMetaInfoServerSideT
     String protocol = request.getProtocol();
     GetProtocolVersionsResponseProto.Builder builder = 
         GetProtocolVersionsResponseProto.newBuilder();
-    for (RpcKind r : RpcKind.values()) {
+    for (RPC.RpcKind r : RPC.RpcKind.values()) {
       long[] versions;
       try {
         versions = getProtocolVersionForRpcKind(r, protocol);
@@ -78,7 +77,7 @@ public class ProtocolMetaInfoServerSideT
     String rpcKind = request.getRpcKind();
     long[] versions;
     try {
-      versions = getProtocolVersionForRpcKind(RpcKind.valueOf(rpcKind),
+      versions = getProtocolVersionForRpcKind(RPC.RpcKind.valueOf(rpcKind),
           protocol);
     } catch (ClassNotFoundException e1) {
       throw new ServiceException(e1);
@@ -104,7 +103,7 @@ public class ProtocolMetaInfoServerSideT
     return builder.build();
   }
   
-  private long[] getProtocolVersionForRpcKind(RpcKind rpcKind,
+  private long[] getProtocolVersionForRpcKind(RPC.RpcKind rpcKind,
       String protocol) throws ClassNotFoundException {
     Class<?> protocolClass = Class.forName(protocol);
     String protocolName = RPC.getProtocolName(protocolClass);

Modified: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RPC.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RPC.java?rev=1329947&r1=1329946&r2=1329947&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RPC.java (original)
+++ hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RPC.java Tue Apr 24 19:05:09 2012
@@ -42,7 +42,6 @@ import org.apache.commons.logging.*;
 import org.apache.hadoop.HadoopIllegalArgumentException;
 import org.apache.hadoop.io.*;
 import org.apache.hadoop.ipc.Client.ConnectionId;
-import org.apache.hadoop.ipc.RpcPayloadHeader.RpcKind;
 import org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolInfoService;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.SaslRpcServer;
@@ -73,6 +72,18 @@ import com.google.protobuf.BlockingServi
  * the protocol instance is transmitted.
  */
 public class RPC {
+  public enum RpcKind {
+    RPC_BUILTIN ((short) 1),         // Used for built in calls by tests
+    RPC_WRITABLE ((short) 2),        // Use WritableRpcEngine 
+    RPC_PROTOCOL_BUFFER ((short) 3); // Use ProtobufRpcEngine
+    final static short MAX_INDEX = RPC_PROTOCOL_BUFFER.value; // used for array size
+    private static final short FIRST_INDEX = RPC_BUILTIN.value;    
+    public final short value; //TODO make it private
+
+    RpcKind(short val) {
+      this.value = val;
+    } 
+  }
   
   interface RpcInvoker {   
     /**
@@ -777,7 +788,7 @@ public class RPC {
    ArrayList<Map<ProtoNameVer, ProtoClassProtoImpl>> protocolImplMapArray = 
        new ArrayList<Map<ProtoNameVer, ProtoClassProtoImpl>>(RpcKind.MAX_INDEX);
    
-   Map<ProtoNameVer, ProtoClassProtoImpl> getProtocolImplMap(RpcKind rpcKind) {
+   Map<ProtoNameVer, ProtoClassProtoImpl> getProtocolImplMap(RPC.RpcKind rpcKind) {
      if (protocolImplMapArray.size() == 0) {// initialize for all rpc kinds
        for (int i=0; i <= RpcKind.MAX_INDEX; ++i) {
          protocolImplMapArray.add(
@@ -821,7 +832,7 @@ public class RPC {
    
    
    @SuppressWarnings("unused") // will be useful later.
-   VerProtocolImpl[] getSupportedProtocolVersions(RpcKind rpcKind,
+   VerProtocolImpl[] getSupportedProtocolVersions(RPC.RpcKind rpcKind,
        String protocolName) {
      VerProtocolImpl[] resultk = 
          new  VerProtocolImpl[getProtocolImplMap(rpcKind).size()];
@@ -900,7 +911,7 @@ public class RPC {
     }
     
     @Override
-    public Writable call(RpcKind rpcKind, String protocol,
+    public Writable call(RPC.RpcKind rpcKind, String protocol,
         Writable rpcRequest, long receiveTime) throws Exception {
       return getRpcInvoker(rpcKind).call(this, protocol, rpcRequest,
           receiveTime);
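
The RpcKind enum moved into RPC (above) carries an explicit short value
per kind plus MAX_INDEX, which lets per-kind state live in a plain list
sized off the enum, as protocolImplMapArray does. A sketch of that
indexing pattern (the map value type is illustrative, and MAX_INDEX is
package-private, so this only compiles inside org.apache.hadoop.ipc):

    // RpcKind values run 1..MAX_INDEX, so allocate MAX_INDEX + 1 slots
    // and index by the enum's short value (slot 0 stays unused).
    List<Map<String, Object>> perKind =
        new ArrayList<Map<String, Object>>(RPC.RpcKind.MAX_INDEX + 1);
    for (int i = 0; i <= RPC.RpcKind.MAX_INDEX; i++) {
      perKind.add(new HashMap<String, Object>());
    }
    Map<String, Object> writableSlot =
        perKind.get(RPC.RpcKind.RPC_WRITABLE.value);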

Modified: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcClientUtil.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcClientUtil.java?rev=1329947&r1=1329946&r2=1329947&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcClientUtil.java (original)
+++ hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcClientUtil.java Tue Apr 24 19:05:09 2012
@@ -27,7 +27,6 @@ import java.util.TreeMap;
 import java.util.concurrent.ConcurrentHashMap;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.ipc.RpcPayloadHeader.RpcKind;
 import org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureRequestProto;
 import org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureResponseProto;
 import org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto;
@@ -107,7 +106,7 @@ public class RpcClientUtil {
    * @throws IOException
    */
   public static boolean isMethodSupported(Object rpcProxy, Class<?> protocol,
-      RpcKind rpcKind, long version, String methodName) throws IOException {
+      RPC.RpcKind rpcKind, long version, String methodName) throws IOException {
     InetSocketAddress serverAddress = RPC.getServerAddress(rpcProxy);
     Map<Long, ProtocolSignature> versionMap = getVersionSignatureMap(
         serverAddress, protocol.getName(), rpcKind.toString());
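
For callers, isMethodSupported (above) is a client-side capability probe:
ask the server for its protocol signatures before invoking a method that
an older server may lack. A hedged usage sketch (the proxy, protocol
class, version constant, and method name are all placeholders):

    // Probe before calling, instead of catching a remote failure after.
    boolean supported = RpcClientUtil.isMethodSupported(
        proxy,                            // an existing RPC proxy object
        SomeProtocolPB.class,             // the protocol interface
        RPC.RpcKind.RPC_PROTOCOL_BUFFER,  // engine the proxy was built with
        SomeProtocolPB.versionID,         // protocol version constant
        "someNewMethod");                 // method being probed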

Modified: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java?rev=1329947&r1=1329946&r2=1329947&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java (original)
+++ hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java Tue Apr 24 19:05:09 2012
@@ -72,11 +72,10 @@ import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableUtils;
 import org.apache.hadoop.ipc.RPC.RpcInvoker;
 import org.apache.hadoop.ipc.RPC.VersionMismatch;
-import org.apache.hadoop.ipc.RpcPayloadHeader.RpcKind;
-import org.apache.hadoop.ipc.RpcPayloadHeader.RpcPayloadOperation;
 import org.apache.hadoop.ipc.metrics.RpcDetailedMetrics;
 import org.apache.hadoop.ipc.metrics.RpcMetrics;
 import org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.IpcConnectionContextProto;
+import org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.*;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.AccessControlException;
 import org.apache.hadoop.security.SaslRpcServer;
@@ -170,8 +169,8 @@ public abstract class Server {
       this.rpcRequestWrapperClass = rpcRequestWrapperClass;
     }   
   }
-  static Map<RpcKind, RpcKindMapValue> rpcKindMap = new
-      HashMap<RpcKind, RpcKindMapValue>(4);
+  static Map<RPC.RpcKind, RpcKindMapValue> rpcKindMap = new
+      HashMap<RPC.RpcKind, RpcKindMapValue>(4);
   
   
 
@@ -185,7 +184,7 @@ public abstract class Server {
    *  @param rpcInvoker - use to process the calls on SS.
    */
   
-  public static void registerProtocolEngine(RpcKind rpcKind, 
+  public static void registerProtocolEngine(RPC.RpcKind rpcKind, 
           Class<? extends Writable> rpcRequestWrapperClass,
           RpcInvoker rpcInvoker) {
     RpcKindMapValue  old = 
@@ -201,14 +200,14 @@ public abstract class Server {
   }
   
   public Class<? extends Writable> getRpcRequestWrapper(
-      RpcKind rpcKind) {
+      RpcKindProto rpcKind) {
     if (rpcRequestClass != null)
        return rpcRequestClass;
-    RpcKindMapValue val = rpcKindMap.get(rpcKind);
+    RpcKindMapValue val = rpcKindMap.get(ProtoUtil.convert(rpcKind));
     return (val == null) ? null : val.rpcRequestWrapperClass; 
   }
   
-  public static RpcInvoker  getRpcInvoker(RpcKind rpcKind) {
+  public static RpcInvoker  getRpcInvoker(RPC.RpcKind rpcKind) {
     RpcKindMapValue val = rpcKindMap.get(rpcKind);
     return (val == null) ? null : val.rpcInvoker; 
   }
@@ -403,12 +402,12 @@ public abstract class Server {
     private long timestamp;               // time received when response is null
                                           // time served when response is not null
     private ByteBuffer rpcResponse;       // the response for this call
-    private final RpcKind rpcKind;
+    private final RPC.RpcKind rpcKind;
 
     public Call(int id, Writable param, Connection connection) {
-      this( id,  param,  connection, RpcKind.RPC_BUILTIN );    
+      this( id,  param,  connection, RPC.RpcKind.RPC_BUILTIN );    
     }
-    public Call(int id, Writable param, Connection connection, RpcKind kind) { 
+    public Call(int id, Writable param, Connection connection, RPC.RpcKind kind) { 
       this.callId = id;
       this.rpcRequest = param;
       this.connection = connection;
@@ -1366,7 +1365,6 @@ public abstract class Server {
         if (data == null) {
           dataLengthBuffer.flip();
           dataLength = dataLengthBuffer.getInt();
-       
           if ((dataLength == Client.PING_CALL_ID) && (!useWrap)) {
             // covers the !useSasl too
             dataLengthBuffer.clear();
@@ -1555,22 +1553,27 @@ public abstract class Server {
     private void processData(byte[] buf) throws  IOException, InterruptedException {
       DataInputStream dis =
         new DataInputStream(new ByteArrayInputStream(buf));
-      RpcPayloadHeader header = new RpcPayloadHeader();
-      header.readFields(dis);           // Read the RpcPayload header
+      RpcPayloadHeaderProto header = RpcPayloadHeaderProto.parseDelimitedFrom(dis);
         
       if (LOG.isDebugEnabled())
         LOG.debug(" got #" + header.getCallId());
-      if (header.getOperation() != RpcPayloadOperation.RPC_FINAL_PAYLOAD) {
+      if (!header.hasRpcOp()) {
+        throw new IOException(" IPC Server: No rpc op in rpcPayloadHeader");
+      }
+      if (header.getRpcOp() != RpcPayloadOperationProto.RPC_FINAL_PAYLOAD) {
         throw new IOException("IPC Server does not implement operation" + 
-              header.getOperation());
+              header.getRpcOp());
       }
       // If we know the rpc kind, get its class so that we can deserialize
       // (Note it would make more sense to have the handler deserialize but 
       // we continue with this original design.
+      if (!header.hasRpcKind()) {
+        throw new IOException(" IPC Server: No rpc kind in rpcPayloadHeader");
+      }
       Class<? extends Writable> rpcRequestClass = 
-          getRpcRequestWrapper(header.getkind());
+          getRpcRequestWrapper(header.getRpcKind());
       if (rpcRequestClass == null) {
-        LOG.warn("Unknown rpc kind "  + header.getkind() + 
+        LOG.warn("Unknown rpc kind "  + header.getRpcKind() + 
             " from client " + getHostAddress());
         final Call readParamsFailedCall = 
             new Call(header.getCallId(), null, this);
@@ -1578,7 +1581,7 @@ public abstract class Server {
 
         setupResponse(responseBuffer, readParamsFailedCall, Status.FATAL, null,
             IOException.class.getName(),
-            "Unknown rpc kind "  + header.getkind());
+            "Unknown rpc kind "  + header.getRpcKind());
         responder.doRespond(readParamsFailedCall);
         return;   
       }
@@ -1589,7 +1592,7 @@ public abstract class Server {
       } catch (Throwable t) {
         LOG.warn("Unable to read call parameters for client " +
                  getHostAddress() + "on connection protocol " +
-            this.protocolName + " for rpcKind " + header.getkind(),  t);
+            this.protocolName + " for rpcKind " + header.getRpcKind(),  t);
         final Call readParamsFailedCall = 
             new Call(header.getCallId(), null, this);
         ByteArrayOutputStream responseBuffer = new ByteArrayOutputStream();
@@ -1601,7 +1604,8 @@ public abstract class Server {
         return;
       }
         
-      Call call = new Call(header.getCallId(), rpcRequest, this, header.getkind());
+      Call call = new Call(header.getCallId(), rpcRequest, this, 
+          ProtoUtil.convert(header.getRpcKind()));
       callQueue.put(call);              // queue the call; maybe blocked here
       incRpcCount();  // Increment the rpc count
     }
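
On the receive side, processData (above) now parses the varint-delimited
header with parseDelimitedFrom and validates the optional rpcOp and
rpcKind fields before deserializing the payload. A condensed sketch of
that read path, mirroring the frame layout from the client-side change:

    import java.io.ByteArrayInputStream;
    import java.io.DataInputStream;
    import java.io.IOException;

    import org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcPayloadHeaderProto;
    import org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcPayloadOperationProto;

    static void readFrame(byte[] buf) throws IOException {
      DataInputStream dis = new DataInputStream(new ByteArrayInputStream(buf));
      // Consumes the varint length prefix, then exactly that many bytes.
      RpcPayloadHeaderProto header = RpcPayloadHeaderProto.parseDelimitedFrom(dis);
      if (!header.hasRpcKind() || !header.hasRpcOp()) {
        throw new IOException("malformed rpc payload header");
      }
      if (header.getRpcOp() != RpcPayloadOperationProto.RPC_FINAL_PAYLOAD) {
        throw new IOException("unsupported rpc op " + header.getRpcOp());
      }
      // Whatever remains in dis is the serialized RpcRequest.
    }
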
@@ -1991,11 +1995,11 @@ public abstract class Server {
    */
   @Deprecated
   public Writable call(Writable param, long receiveTime) throws Exception {
-    return call(RpcKind.RPC_BUILTIN, null, param, receiveTime);
+    return call(RPC.RpcKind.RPC_BUILTIN, null, param, receiveTime);
   }
   
   /** Called for each call. */
-  public abstract Writable call(RpcKind rpcKind, String protocol,
+  public abstract Writable call(RPC.RpcKind rpcKind, String protocol,
       Writable param, long receiveTime) throws Exception;
   
   /**

Modified: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WritableRpcEngine.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WritableRpcEngine.java?rev=1329947&r1=1329946&r2=1329947&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WritableRpcEngine.java (original)
+++ hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WritableRpcEngine.java Tue Apr 24 19:05:09 2012
@@ -33,7 +33,6 @@ import org.apache.commons.logging.*;
 import org.apache.hadoop.io.*;
 import org.apache.hadoop.ipc.Client.ConnectionId;
 import org.apache.hadoop.ipc.RPC.RpcInvoker;
-import org.apache.hadoop.ipc.RpcPayloadHeader.RpcKind;
 import org.apache.hadoop.ipc.VersionedProtocol;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.token.SecretManager;
@@ -75,7 +74,7 @@ public class WritableRpcEngine implement
    * Register the rpcRequest deserializer for WritableRpcEngine
    */
   private static synchronized void initialize() {
-    org.apache.hadoop.ipc.Server.registerProtocolEngine(RpcKind.RPC_WRITABLE,
+    org.apache.hadoop.ipc.Server.registerProtocolEngine(RPC.RpcKind.RPC_WRITABLE,
         Invocation.class, new Server.WritableRpcInvoker());
     isInitialized = true;
   }
@@ -223,7 +222,7 @@ public class WritableRpcEngine implement
       }
 
       ObjectWritable value = (ObjectWritable)
-        client.call(RpcKind.RPC_WRITABLE, new Invocation(method, args), remoteId);
+        client.call(RPC.RpcKind.RPC_WRITABLE, new Invocation(method, args), remoteId);
       if (LOG.isDebugEnabled()) {
         long callTime = System.currentTimeMillis() - startTime;
         LOG.debug("Call: " + method.getName() + " " + callTime);
@@ -412,12 +411,12 @@ public class WritableRpcEngine implement
               protocolImpl.getClass());
         }
         // register protocol class and its super interfaces
-        registerProtocolAndImpl(RpcKind.RPC_WRITABLE, protocolClass, protocolImpl);
+        registerProtocolAndImpl(RPC.RpcKind.RPC_WRITABLE, protocolClass, protocolImpl);
         protocols = RPC.getProtocolInterfaces(protocolClass);
       }
       for (Class<?> p : protocols) {
         if (!p.equals(VersionedProtocol.class)) {
-          registerProtocolAndImpl(RpcKind.RPC_WRITABLE, p, protocolImpl);
+          registerProtocolAndImpl(RPC.RpcKind.RPC_WRITABLE, p, protocolImpl);
         }
       }
 
@@ -461,7 +460,7 @@ public class WritableRpcEngine implement
             // registered directly.
             // Send the call to the highest  protocol version
             VerProtocolImpl highest = server.getHighestSupportedProtocol(
-                RpcKind.RPC_WRITABLE, protocolName);
+                RPC.RpcKind.RPC_WRITABLE, protocolName);
             if (highest == null) {
               throw new IOException("Unknown protocol: " + protocolName);
             }
@@ -473,10 +472,10 @@ public class WritableRpcEngine implement
             ProtoNameVer pv = 
                 new ProtoNameVer(call.declaringClassProtocolName, clientVersion);
             protocolImpl = 
-                server.getProtocolImplMap(RpcKind.RPC_WRITABLE).get(pv);
+                server.getProtocolImplMap(RPC.RpcKind.RPC_WRITABLE).get(pv);
             if (protocolImpl == null) { // no match for Protocol AND Version
                VerProtocolImpl highest = 
-                   server.getHighestSupportedProtocol(RpcKind.RPC_WRITABLE, 
+                   server.getHighestSupportedProtocol(RPC.RpcKind.RPC_WRITABLE, 
                        protoName);
               if (highest == null) {
                 throw new IOException("Unknown protocol: " + protoName);

Modified: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java?rev=1329947&r1=1329946&r2=1329947&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java (original)
+++ hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java Tue Apr 24 19:05:09 2012
@@ -220,6 +220,8 @@ public class SecurityUtil {
    * @return converted Kerberos principal name
    * @throws IOException if the client address cannot be determined
    */
+  @InterfaceAudience.Public
+  @InterfaceStability.Evolving
   public static String getServerPrincipal(String principalConfig,
       String hostname) throws IOException {
     String[] components = getComponents(principalConfig);
@@ -245,6 +247,8 @@ public class SecurityUtil {
    * @return converted Kerberos principal name
    * @throws IOException if the client address cannot be determined
    */
+  @InterfaceAudience.Public
+  @InterfaceStability.Evolving
   public static String getServerPrincipal(String principalConfig,
       InetAddress addr) throws IOException {
     String[] components = getComponents(principalConfig);
@@ -292,6 +296,8 @@ public class SecurityUtil {
    *          the key to look for user's Kerberos principal name in conf
    * @throws IOException if login fails
    */
+  @InterfaceAudience.Public
+  @InterfaceStability.Evolving
   public static void login(final Configuration conf,
       final String keytabFileKey, final String userNameKey) throws IOException {
     login(conf, keytabFileKey, userNameKey, getLocalHostName());
@@ -312,6 +318,8 @@ public class SecurityUtil {
    *          hostname to use for substitution
    * @throws IOException if the config doesn't specify a keytab
    */
+  @InterfaceAudience.Public
+  @InterfaceStability.Evolving
   public static void login(final Configuration conf,
       final String keytabFileKey, final String userNameKey, String hostname)
       throws IOException {
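
The annotations added above (part of HADOOP-8152) promote these
SecurityUtil methods to public, evolving API. A minimal usage sketch for
keytab login; the configuration key names are hypothetical:

    Configuration conf = new Configuration();
    // Each key names a config property: the first holds the keytab path,
    // the second the principal pattern (e.g. "svc/_HOST@EXAMPLE.COM").
    SecurityUtil.login(conf,
        "myservice.keytab.file",
        "myservice.kerberos.principal");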

Modified: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java?rev=1329947&r1=1329946&r2=1329947&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java (original)
+++ hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java Tue Apr 24 19:05:09 2012
@@ -69,7 +69,7 @@ import org.apache.hadoop.util.Shell;
  * user's username and groups. It supports both the Windows, Unix and Kerberos 
  * login modules.
  */
-@InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
+@InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce", "HBase", "Hive", "Oozie"})
 @InterfaceStability.Evolving
 public class UserGroupInformation {
   private static final Log LOG =  LogFactory.getLog(UserGroupInformation.class);
@@ -258,6 +258,8 @@ public class UserGroupInformation {
    * group look up service.
    * @param conf the configuration to use
    */
+  @InterfaceAudience.Public
+  @InterfaceStability.Evolving
   public static void setConfiguration(Configuration conf) {
     initialize(conf, false);
   }
@@ -500,6 +502,8 @@ public class UserGroupInformation {
    * @return the current user
    * @throws IOException if login fails
    */
+  @InterfaceAudience.Public
+  @InterfaceStability.Evolving
   public synchronized
   static UserGroupInformation getCurrentUser() throws IOException {
     AccessControlContext context = AccessController.getContext();
@@ -516,6 +520,8 @@ public class UserGroupInformation {
    * @return the logged in user
    * @throws IOException if login fails
    */
+  @InterfaceAudience.Public
+  @InterfaceStability.Evolving
   public synchronized 
   static UserGroupInformation getLoginUser() throws IOException {
     if (loginUser == null) {
@@ -652,6 +658,8 @@ public class UserGroupInformation {
    * @param path the path to the keytab file
    * @throws IOException if the keytab file can't be read
    */
+  @InterfaceAudience.Public
+  @InterfaceStability.Evolving
   public synchronized
   static void loginUserFromKeytab(String user,
                                   String path
@@ -710,6 +718,8 @@ public class UserGroupInformation {
    * the new credentials.
    * @throws IOException on a failure
    */
+  @InterfaceAudience.Public
+  @InterfaceStability.Evolving
   public synchronized void reloginFromKeytab()
   throws IOException {
     if (!isSecurityEnabled() ||
@@ -769,6 +779,8 @@ public class UserGroupInformation {
    * the new credentials.
    * @throws IOException on a failure
    */
+  @InterfaceAudience.Public
+  @InterfaceStability.Evolving
   public synchronized void reloginFromTicketCache()
   throws IOException {
     if (!isSecurityEnabled() || 
@@ -867,6 +879,8 @@ public class UserGroupInformation {
    * Did the login happen via keytab
    * @return true or false
    */
+  @InterfaceAudience.Public
+  @InterfaceStability.Evolving
   public synchronized static boolean isLoginKeytabBased() throws IOException {
     return getLoginUser().isKeytab;
   }
@@ -877,6 +891,8 @@ public class UserGroupInformation {
    * @param user the full user principal name, must not be empty or null
    * @return the UserGroupInformation for the remote user.
    */
+  @InterfaceAudience.Public
+  @InterfaceStability.Evolving
   public static UserGroupInformation createRemoteUser(String user) {
     if (user == null || "".equals(user)) {
       throw new IllegalArgumentException("Null user");
@@ -891,6 +907,7 @@ public class UserGroupInformation {
   /**
    * existing types of authentications' methods
    */
+  @InterfaceAudience.Public
   @InterfaceStability.Evolving
   public static enum AuthenticationMethod {
     SIMPLE,
@@ -908,6 +925,8 @@ public class UserGroupInformation {
    * @param realUser
    * @return proxyUser ugi
    */
+  @InterfaceAudience.Public
+  @InterfaceStability.Evolving
   public static UserGroupInformation createProxyUser(String user,
       UserGroupInformation realUser) {
     if (user == null || "".equals(user)) {
@@ -929,6 +948,8 @@ public class UserGroupInformation {
    * get RealUser (vs. EffectiveUser)
    * @return realUser running over proxy user
    */
+  @InterfaceAudience.Public
+  @InterfaceStability.Evolving
   public UserGroupInformation getRealUser() {
     for (RealUser p: subject.getPrincipals(RealUser.class)) {
       return p.getRealUser();
@@ -974,7 +995,8 @@ public class UserGroupInformation {
    * @param userGroups the names of the groups that the user belongs to
    * @return a fake user for running unit tests
    */
-  @InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
+  @InterfaceAudience.Public
+  @InterfaceStability.Evolving
   public static UserGroupInformation createUserForTesting(String user, 
                                                           String[] userGroups) {
     ensureInitialized();
@@ -1000,7 +1022,6 @@ public class UserGroupInformation {
    *          the names of the groups that the user belongs to
    * @return a fake user for running unit tests
    */
-  @InterfaceAudience.LimitedPrivate( { "HDFS", "MapReduce" })
   public static UserGroupInformation createProxyUserForTesting(String user,
       UserGroupInformation realUser, String[] userGroups) {
     ensureInitialized();
@@ -1029,6 +1050,8 @@ public class UserGroupInformation {
    * Get the user's full principal name.
    * @return the user's full principal name.
    */
+  @InterfaceAudience.Public
+  @InterfaceStability.Evolving
   public String getUserName() {
     return user.getName();
   }
@@ -1182,6 +1205,8 @@ public class UserGroupInformation {
    * @param action the method to execute
    * @return the value from the run method
    */
+  @InterfaceAudience.Public
+  @InterfaceStability.Evolving
   public <T> T doAs(PrivilegedAction<T> action) {
     logPrivilegedAction(subject, action);
     return Subject.doAs(subject, action);
@@ -1198,6 +1223,8 @@ public class UserGroupInformation {
    * @throws InterruptedException if the action throws an InterruptedException
    * @throws UndeclaredThrowableException if the action throws something else
    */
+  @InterfaceAudience.Public
+  @InterfaceStability.Evolving
   public <T> T doAs(PrivilegedExceptionAction<T> action
                     ) throws IOException, InterruptedException {
     try {

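The annotations added above make these UserGroupInformation entry points part of the public, evolving API. A minimal sketch of how they compose, with hypothetical user names; doAs runs the action with the UGI's Subject attached, exactly as the annotated methods describe:

    import java.security.PrivilegedExceptionAction;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.security.UserGroupInformation;

    public class UgiSketch {
      public static void main(String[] args) throws Exception {
        // A remote user carries a name but no credentials.
        UserGroupInformation real = UserGroupInformation.createRemoteUser("superuser");
        // "alice" becomes the effective user; "superuser" stays the real user.
        UserGroupInformation proxy = UserGroupInformation.createProxyUser("alice", real);
        System.out.println(proxy.getUserName());               // alice
        System.out.println(proxy.getRealUser().getUserName()); // superuser
        // Execute an action under the proxy identity.
        FileSystem fs = proxy.doAs(new PrivilegedExceptionAction<FileSystem>() {
          public FileSystem run() throws Exception {
            return FileSystem.get(new Configuration());
          }
        });
        System.out.println(fs.getUri());
      }
    }
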
Modified: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ProtoUtil.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ProtoUtil.java?rev=1329947&r1=1329946&r2=1329947&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ProtoUtil.java (original)
+++ hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ProtoUtil.java Tue Apr 24 19:05:09 2012
@@ -21,8 +21,10 @@ package org.apache.hadoop.util;
 import java.io.DataInput;
 import java.io.IOException;
 
+import org.apache.hadoop.ipc.RPC;
 import org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.IpcConnectionContextProto;
 import org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto;
+import org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.*;
 import org.apache.hadoop.security.SaslRpcServer.AuthMethod;
 import org.apache.hadoop.security.UserGroupInformation;
 
@@ -135,4 +137,30 @@ public abstract class ProtoUtil {
     }
     return ugi;
   }
+  
+  static RpcKindProto convert(RPC.RpcKind kind) {
+    switch (kind) {
+    case RPC_BUILTIN: return RpcKindProto.RPC_BUILTIN;
+    case RPC_WRITABLE: return RpcKindProto.RPC_WRITABLE;
+    case RPC_PROTOCOL_BUFFER: return RpcKindProto.RPC_PROTOCOL_BUFFER;
+    }
+    return null;
+  }
+  
+  
+  public static RPC.RpcKind convert( RpcKindProto kind) {
+    switch (kind) {
+    case RPC_BUILTIN: return RPC.RpcKind.RPC_BUILTIN;
+    case RPC_WRITABLE: return RPC.RpcKind.RPC_WRITABLE;
+    case RPC_PROTOCOL_BUFFER: return RPC.RpcKind.RPC_PROTOCOL_BUFFER;
+    }
+    return null;
+  }
+ 
+  public static RpcPayloadHeaderProto makeRpcPayloadHeader(RPC.RpcKind rpcKind,
+      RpcPayloadOperationProto operation, int callId) {
+    RpcPayloadHeaderProto.Builder result = RpcPayloadHeaderProto.newBuilder();
+    result.setRpcKind(convert(rpcKind)).setRpcOp(operation).setCallId(callId);
+    return result.build();
+  }
 }

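The two convert overloads map between the wire-level RpcKindProto and the Java-side RPC.RpcKind, and makeRpcPayloadHeader stitches kind, operation, and call id into one header message. A hedged sketch of building and serializing such a header; RPC_FINAL_PAYLOAD is assumed to be one of RpcPayloadOperationProto's values, since the .proto file itself is not shown in this hunk:

    import java.io.ByteArrayOutputStream;
    import org.apache.hadoop.ipc.RPC;
    import org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcPayloadHeaderProto;
    import org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcPayloadOperationProto;
    import org.apache.hadoop.util.ProtoUtil;

    public class RpcHeaderSketch {
      public static void main(String[] args) throws Exception {
        // Header for call id 42 carrying a protobuf-encoded request.
        // RPC_FINAL_PAYLOAD is an assumption about the operation enum.
        RpcPayloadHeaderProto header = ProtoUtil.makeRpcPayloadHeader(
            RPC.RpcKind.RPC_PROTOCOL_BUFFER,
            RpcPayloadOperationProto.RPC_FINAL_PAYLOAD, 42);
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        header.writeDelimitedTo(out); // varint length prefix + header bytes
        System.out.println("serialized header: " + out.size() + " bytes");
      }
    }
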
Modified: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java?rev=1329947&r1=1329946&r2=1329947&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java (original)
+++ hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java Tue Apr 24 19:05:09 2012
@@ -81,64 +81,6 @@ abstract public class Shell {
   /** Whether or not the script timed out. */
   private AtomicBoolean timedOut;
 
-  /** a Unix command to get ulimit of a process. */
-  public static final String ULIMIT_COMMAND = "ulimit";
-  
-  /** 
-   * Get the Unix command for setting the maximum virtual memory available
-   * to a given child process. This is only relevant when we are forking a
-   * process from within the Mapper or the Reducer implementations.
-   * Also see Hadoop Pipes and Hadoop Streaming.
-   * 
-   * It also checks to ensure that we are running on a *nix platform else 
-   * (e.g. in Cygwin/Windows) it returns <code>null</code>.
-   * @param memoryLimit virtual memory limit
-   * @return a <code>String[]</code> with the ulimit command arguments or 
-   *         <code>null</code> if we are running on a non *nix platform or
-   *         if the limit is unspecified.
-   */
-  public static String[] getUlimitMemoryCommand(int memoryLimit) {
-    // ulimit isn't supported on Windows
-    if (WINDOWS) {
-      return null;
-    }
-    
-    return new String[] {ULIMIT_COMMAND, "-v", String.valueOf(memoryLimit)};
-  }
-  
-  /** 
-   * Get the Unix command for setting the maximum virtual memory available
-   * to a given child process. This is only relevant when we are forking a
-   * process from within the Mapper or the Reducer implementations.
-   * see also Hadoop Pipes and Streaming.
-   * 
-   * It also checks to ensure that we are running on a *nix platform else 
-   * (e.g. in Cygwin/Windows) it returns <code>null</code>.
-   * @param conf configuration
-   * @return a <code>String[]</code> with the ulimit command arguments or 
-   *         <code>null</code> if we are running on a non *nix platform or
-   *         if the limit is unspecified.
-   * @deprecated Use {@link #getUlimitMemoryCommand(int)}
-   */
-  @Deprecated
-  public static String[] getUlimitMemoryCommand(Configuration conf) {
-    // ulimit isn't supported on Windows
-    if (WINDOWS) {
-      return null;
-    }
-    
-    // get the memory limit from the configuration
-    String ulimit = conf.get("mapred.child.ulimit");
-    if (ulimit == null) {
-      return null;
-    }
-    
-    // Parse it to ensure it is legal/sane
-    int memoryLimit = Integer.valueOf(ulimit);
-    
-    return getUlimitMemoryCommand(memoryLimit);
-  }
-  
   /** Set to true on Windows platforms */
   public static final boolean WINDOWS /* borrowed from Path.WINDOWS */
                 = System.getProperty("os.name").startsWith("Windows");

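For readers tracking this removal: the deleted helper only assembled an argv for the shell built-in and bailed out on Windows. A minimal equivalent, shown with the 8388608 default that the mapred-site.xml template below also drops (units are whatever the shell's ulimit -v expects):

    import java.util.Arrays;

    public class UlimitSketch {
      // Mirrors the removed Shell.getUlimitMemoryCommand(int).
      public static String[] ulimitCommand(int memoryLimit) {
        if (System.getProperty("os.name").startsWith("Windows")) {
          return null; // ulimit is not available on Windows
        }
        return new String[] {"ulimit", "-v", String.valueOf(memoryLimit)};
      }

      public static void main(String[] args) {
        System.out.println(Arrays.toString(ulimitCommand(8388608)));
      }
    }
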
Modified: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/packages/templates/conf/mapred-site.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/packages/templates/conf/mapred-site.xml?rev=1329947&r1=1329946&r2=1329947&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/packages/templates/conf/mapred-site.xml (original)
+++ hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/packages/templates/conf/mapred-site.xml Tue Apr 24 19:05:09 2012
@@ -109,11 +109,6 @@
   </property>
 
   <property>
-    <name>mapred.child.ulimit</name>
-    <value>8388608</value>
-  </property>
-
-  <property>
     <name>mapred.job.tracker.persist.jobstatus.active</name>
     <value>true</value>
     <description>Indicates if persistency of job status information is

Modified: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml?rev=1329947&r1=1329946&r2=1329947&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml (original)
+++ hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml Tue Apr 24 19:05:09 2012
@@ -268,9 +268,11 @@
 
 <property>
   <name>io.compression.codecs</name>
-  <value>org.apache.hadoop.io.compress.DefaultCodec,org.apache.hadoop.io.compress.GzipCodec,org.apache.hadoop.io.compress.BZip2Codec,org.apache.hadoop.io.compress.DeflateCodec,org.apache.hadoop.io.compress.SnappyCodec,org.apache.hadoop.io.compress.Lz4Codec</value>
-  <description>A list of the compression codec classes that can be used 
-               for compression/decompression.</description>
+  <value></value>
+  <description>A comma-separated list of the compression codec classes that can
+  be used for compression/decompression. In addition to any classes specified
+  with this property (which take precedence), codec classes on the classpath
+  are discovered using a Java ServiceLoader.</description>
 </property>
 
 <property>

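The description's reference to "a Java ServiceLoader" is the standard JDK mechanism: implementations listed in the new META-INF/services/org.apache.hadoop.io.compress.CompressionCodec file are instantiated reflectively. A minimal sketch of that discovery loop (CompressionCodecFactory's exact internals are not part of this hunk):

    import java.util.ServiceLoader;
    import org.apache.hadoop.io.compress.CompressionCodec;

    public class CodecDiscoverySketch {
      public static void main(String[] args) {
        // Iterates every codec registered via a META-INF/services entry
        // on the classpath, as the new io.compression.codecs text describes.
        for (CompressionCodec codec : ServiceLoader.load(CompressionCodec.class)) {
          System.out.println(codec.getClass().getName()
              + " handles " + codec.getDefaultExtension());
        }
      }
    }
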
Modified: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/site/apt/DeprecatedProperties.apt.vm
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/site/apt/DeprecatedProperties.apt.vm?rev=1329947&r1=1329946&r2=1329947&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/site/apt/DeprecatedProperties.apt.vm (original)
+++ hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/site/apt/DeprecatedProperties.apt.vm Tue Apr 24 19:05:09 2012
@@ -314,8 +314,6 @@ Deprecated Properties
 *---+---+
 |mapred.map.child.log.level | mapreduce.map.log.level
 *---+---+
-|mapred.map.child.ulimit | mapreduce.map.ulimit
-*---+---+
 |mapred.map.max.attempts | mapreduce.map.maxattempts
 *---+---+
 |mapred.map.output.compression.codec | mapreduce.map.output.compress.codec
@@ -378,8 +376,6 @@ Deprecated Properties
 *---+---+
 |mapred.reduce.child.log.level | mapreduce.reduce.log.level
 *---+---+
-|mapred.reduce.child.ulimit | mapreduce.reduce.ulimit
-*---+---+
 |mapred.reduce.max.attempts | mapreduce.reduce.maxattempts
 *---+---+
 |mapred.reduce.parallel.copies | mapreduce.reduce.shuffle.parallelcopies

Propchange: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/core/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/core:r1327258-1329943

Modified: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodec.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodec.java?rev=1329947&r1=1329946&r2=1329947&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodec.java (original)
+++ hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodec.java Tue Apr 24 19:05:09 2012
@@ -756,7 +756,8 @@ public class TestCodec {
 
     // Don't use native libs for this test.
     Configuration conf = new Configuration();
-    conf.setBoolean("hadoop.native.lib", false);
+    conf.setBoolean(CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_KEY,
+                    false);
     assertFalse("ZlibFactory is using native libs against request",
                 ZlibFactory.isNativeZlibLoaded(conf));
 

Modified: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodecFactory.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodecFactory.java?rev=1329947&r1=1329946&r2=1329947&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodecFactory.java (original)
+++ hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodecFactory.java Tue Apr 24 19:05:09 2012
@@ -101,6 +101,12 @@ public class TestCodecFactory extends Te
     }
   }
   
+  private static class NewGzipCodec extends BaseCodec {
+    public String getDefaultExtension() {
+      return ".gz";
+    }
+  }
+  
   /**
    * Returns a factory for a given set of codecs
    * @param classes the codec classes to include
@@ -167,32 +173,43 @@ public class TestCodecFactory extends Te
     checkCodec("default factory for deflate codec", DeflateCodec.class, codec);
 
     factory = setClasses(new Class[0]);
+    // gz, bz2, snappy, lz4 are picked up by service loader, but bar isn't
     codec = factory.getCodec(new Path("/tmp/foo.bar"));
-    assertEquals("empty codec bar codec", null, codec);
+    assertEquals("empty factory bar codec", null, codec);
     codec = factory.getCodecByClassName(BarCodec.class.getCanonicalName());
-    assertEquals("empty codec bar codec", null, codec);
+    assertEquals("empty factory bar codec", null, codec);
     
     codec = factory.getCodec(new Path("/tmp/foo.gz"));
-    assertEquals("empty codec gz codec", null, codec);
+    checkCodec("empty factory gz codec", GzipCodec.class, codec);
     codec = factory.getCodecByClassName(GzipCodec.class.getCanonicalName());
-    assertEquals("empty codec gz codec", null, codec);
+    checkCodec("empty factory gz codec", GzipCodec.class, codec);
     
     codec = factory.getCodec(new Path("/tmp/foo.bz2"));
-    assertEquals("empty factory for .bz2", null, codec);
+    checkCodec("empty factory for .bz2", BZip2Codec.class, codec);
     codec = factory.getCodecByClassName(BZip2Codec.class.getCanonicalName());
-    assertEquals("empty factory for bzip2 codec", null, codec);
+    checkCodec("empty factory for bzip2 codec", BZip2Codec.class, codec);
+    
+    codec = factory.getCodec(new Path("/tmp/foo.snappy"));
+    checkCodec("empty factory snappy codec", SnappyCodec.class, codec);
+    codec = factory.getCodecByClassName(SnappyCodec.class.getCanonicalName());
+    checkCodec("empty factory snappy codec", SnappyCodec.class, codec);
+    
+    codec = factory.getCodec(new Path("/tmp/foo.lz4"));
+    checkCodec("empty factory lz4 codec", Lz4Codec.class, codec);
+    codec = factory.getCodecByClassName(Lz4Codec.class.getCanonicalName());
+    checkCodec("empty factory lz4 codec", Lz4Codec.class, codec);
     
     factory = setClasses(new Class[]{BarCodec.class, FooCodec.class, 
                                      FooBarCodec.class});
     codec = factory.getCodec(new Path("/tmp/.foo.bar.gz"));
-    assertEquals("full factory gz codec", null, codec);
+    checkCodec("full factory gz codec", GzipCodec.class, codec);
     codec = factory.getCodecByClassName(GzipCodec.class.getCanonicalName());
-    assertEquals("full codec gz codec", null, codec);
+    checkCodec("full codec gz codec", GzipCodec.class, codec);
      
     codec = factory.getCodec(new Path("/tmp/foo.bz2"));
-    assertEquals("full factory for .bz2", null, codec);
+    checkCodec("full factory for .bz2", BZip2Codec.class, codec);
     codec = factory.getCodecByClassName(BZip2Codec.class.getCanonicalName());
-    assertEquals("full codec bzip2 codec", null, codec);
+    checkCodec("full codec bzip2 codec", BZip2Codec.class, codec);
 
     codec = factory.getCodec(new Path("/tmp/foo.bar"));
     checkCodec("full factory bar codec", BarCodec.class, codec);
@@ -220,5 +237,11 @@ public class TestCodecFactory extends Te
     checkCodec("full factory foo codec", FooCodec.class, codec);
     codec = factory.getCodecByName("FOO");
     checkCodec("full factory foo codec", FooCodec.class, codec);
+    
+    factory = setClasses(new Class[]{NewGzipCodec.class});
+    codec = factory.getCodec(new Path("/tmp/foo.gz"));
+    checkCodec("overridden factory for .gz", NewGzipCodec.class, codec);
+    codec = factory.getCodecByClassName(NewGzipCodec.class.getCanonicalName());
+    checkCodec("overridden factory for gzip codec", NewGzipCodec.class, codec);
   }
 }

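The rewritten assertions boil down to one user-visible behavior change: an empty io.compression.codecs no longer means an empty factory. A small sketch of the lookup the test exercises (path hypothetical):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.compress.CompressionCodec;
    import org.apache.hadoop.io.compress.CompressionCodecFactory;

    public class CodecLookupSketch {
      public static void main(String[] args) {
        Configuration conf = new Configuration(); // io.compression.codecs unset
        CompressionCodecFactory factory = new CompressionCodecFactory(conf);
        CompressionCodec codec = factory.getCodec(new Path("/tmp/foo.gz"));
        // Previously null for an empty codec list; now a GzipCodec found via
        // ServiceLoader. A class named in the config would take precedence.
        System.out.println(codec == null ? "none" : codec.getClass().getName());
      }
    }
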
Modified: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileSeqFileComparison.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileSeqFileComparison.java?rev=1329947&r1=1329946&r2=1329947&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileSeqFileComparison.java (original)
+++ hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileSeqFileComparison.java Tue Apr 24 19:05:09 2012
@@ -33,6 +33,7 @@ import org.apache.commons.cli.Option;
 import org.apache.commons.cli.OptionBuilder;
 import org.apache.commons.cli.Options;
 import org.apache.commons.cli.ParseException;
+import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FSDataOutputStream;
@@ -237,7 +238,8 @@ public class TestTFileSeqFileComparison 
     public SeqFileAppendable(FileSystem fs, Path path, int osBufferSize,
         String compress, int minBlkSize) throws IOException {
       Configuration conf = new Configuration();
-      conf.setBoolean("hadoop.native.lib", true);
+      conf.setBoolean(CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_KEY,
+                      true);
 
       CompressionCodec codec = null;
       if ("lzo".equals(compress)) {

Modified: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPC.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPC.java?rev=1329947&r1=1329946&r2=1329947&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPC.java (original)
+++ hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPC.java Tue Apr 24 19:05:09 2012
@@ -25,7 +25,6 @@ import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.LongWritable;
-import org.apache.hadoop.ipc.RpcPayloadHeader.RpcKind;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.net.NetUtils;
 
@@ -99,7 +98,7 @@ public class TestIPC {
     }
 
     @Override
-    public Writable call(RpcKind rpcKind, String protocol, Writable param,
+    public Writable call(RPC.RpcKind rpcKind, String protocol, Writable param,
         long receiveTime) throws IOException {
       if (sleep) {
         // sleep a bit

Modified: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPCServerResponder.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPCServerResponder.java?rev=1329947&r1=1329946&r2=1329947&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPCServerResponder.java (original)
+++ hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPCServerResponder.java Tue Apr 24 19:05:09 2012
@@ -30,7 +30,6 @@ import org.apache.hadoop.conf.Configurat
 import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.Writable;
-import org.apache.hadoop.ipc.RpcPayloadHeader.RpcKind;
 import org.apache.hadoop.net.NetUtils;
 
 /**
@@ -73,7 +72,7 @@ public class TestIPCServerResponder exte
     }
 
     @Override
-    public Writable call(RpcKind rpcKind, String protocol, Writable param,
+    public Writable call(RPC.RpcKind rpcKind, String protocol, Writable param,
         long receiveTime) throws IOException {
       if (sleep) {
         try {

Modified: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestMultipleProtocolServer.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestMultipleProtocolServer.java?rev=1329947&r1=1329946&r2=1329947&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestMultipleProtocolServer.java (original)
+++ hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestMultipleProtocolServer.java Tue Apr 24 19:05:09 2012
@@ -23,7 +23,6 @@ import java.net.InetSocketAddress;
 import org.junit.Assert;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.ipc.RpcPayloadHeader.RpcKind;
 import org.apache.hadoop.ipc.TestProtoBufRpc.PBServerImpl;
 import org.apache.hadoop.ipc.TestProtoBufRpc.TestRpcService;
 import org.apache.hadoop.ipc.protobuf.TestRpcServiceProtos.TestProtobufRpcProto;
@@ -178,9 +177,9 @@ public class TestMultipleProtocolServer 
     // create a server with two handlers
     server = RPC.getServer(Foo0.class,
                               new Foo0Impl(), ADDRESS, 0, 2, false, conf, null);
-    server.addProtocol(RpcKind.RPC_WRITABLE, Foo1.class, new Foo1Impl());
-    server.addProtocol(RpcKind.RPC_WRITABLE, Bar.class, new BarImpl());
-    server.addProtocol(RpcKind.RPC_WRITABLE, Mixin.class, new BarImpl());
+    server.addProtocol(RPC.RpcKind.RPC_WRITABLE, Foo1.class, new Foo1Impl());
+    server.addProtocol(RPC.RpcKind.RPC_WRITABLE, Bar.class, new BarImpl());
+    server.addProtocol(RPC.RpcKind.RPC_WRITABLE, Mixin.class, new BarImpl());
     
     
     // Add Protobuf server
@@ -189,7 +188,7 @@ public class TestMultipleProtocolServer 
         new PBServerImpl();
     BlockingService service = TestProtobufRpcProto
         .newReflectiveBlockingService(pbServerImpl);
-    server.addProtocol(RpcKind.RPC_PROTOCOL_BUFFER, TestRpcService.class,
+    server.addProtocol(RPC.RpcKind.RPC_PROTOCOL_BUFFER, TestRpcService.class,
         service);
     server.start();
     addr = NetUtils.getConnectAddress(server);

Modified: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestProtoBufRpc.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestProtoBufRpc.java?rev=1329947&r1=1329946&r2=1329947&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestProtoBufRpc.java (original)
+++ hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestProtoBufRpc.java Tue Apr 24 19:05:09 2012
@@ -24,7 +24,6 @@ import java.io.IOException;
 import java.net.InetSocketAddress;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.ipc.RpcPayloadHeader.RpcKind;
 import org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto;
 import org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto;
 import org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto;
@@ -122,7 +121,7 @@ public class TestProtoBufRpc {
     BlockingService service2 = TestProtobufRpc2Proto
         .newReflectiveBlockingService(server2Impl);
     
-    server.addProtocol(RpcKind.RPC_PROTOCOL_BUFFER, TestRpcService2.class,
+    server.addProtocol(RPC.RpcKind.RPC_PROTOCOL_BUFFER, TestRpcService2.class,
         service2);
     server.start();
   }

Modified: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPCCompatibility.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPCCompatibility.java?rev=1329947&r1=1329946&r2=1329947&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPCCompatibility.java (original)
+++ hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPCCompatibility.java Tue Apr 24 19:05:09 2012
@@ -31,7 +31,6 @@ import junit.framework.Assert;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.ipc.RpcPayloadHeader.RpcKind;
 import org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureRequestProto;
 import org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureResponseProto;
 import org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto;
@@ -134,7 +133,7 @@ public class TestRPCCompatibility {
     TestImpl1 impl = new TestImpl1();
     server = RPC.getServer(TestProtocol1.class,
                             impl, ADDRESS, 0, 2, false, conf, null);
-    server.addProtocol(RpcKind.RPC_WRITABLE, TestProtocol0.class, impl);
+    server.addProtocol(RPC.RpcKind.RPC_WRITABLE, TestProtocol0.class, impl);
     server.start();
     addr = NetUtils.getConnectAddress(server);
 
@@ -201,7 +200,7 @@ System.out.println("echo int is NOT supp
     TestImpl1 impl = new TestImpl1();
     server = RPC.getServer(TestProtocol1.class,
                               impl, ADDRESS, 0, 2, false, conf, null);
-    server.addProtocol(RpcKind.RPC_WRITABLE, TestProtocol0.class, impl);
+    server.addProtocol(RPC.RpcKind.RPC_WRITABLE, TestProtocol0.class, impl);
     server.start();
     addr = NetUtils.getConnectAddress(server);
 
@@ -222,7 +221,7 @@ System.out.println("echo int is NOT supp
     TestImpl2 impl = new TestImpl2();
     server = RPC.getServer(TestProtocol2.class,
                              impl, ADDRESS, 0, 2, false, conf, null);
-    server.addProtocol(RpcKind.RPC_WRITABLE, TestProtocol0.class, impl);
+    server.addProtocol(RPC.RpcKind.RPC_WRITABLE, TestProtocol0.class, impl);
     server.start();
     addr = NetUtils.getConnectAddress(server);
 
@@ -316,11 +315,11 @@ System.out.println("echo int is NOT supp
     TestProtocol2 proxy = RPC.getProxy(TestProtocol2.class,
         TestProtocol2.versionID, addr, conf);
     boolean supported = RpcClientUtil.isMethodSupported(proxy,
-        TestProtocol2.class, RpcKind.RPC_WRITABLE,
+        TestProtocol2.class, RPC.RpcKind.RPC_WRITABLE,
         RPC.getProtocolVersion(TestProtocol2.class), "echo");
     Assert.assertTrue(supported);
     supported = RpcClientUtil.isMethodSupported(proxy,
-        TestProtocol2.class, RpcKind.RPC_PROTOCOL_BUFFER,
+        TestProtocol2.class, RPC.RpcKind.RPC_PROTOCOL_BUFFER,
         RPC.getProtocolVersion(TestProtocol2.class), "echo");
     Assert.assertFalse(supported);
   }
@@ -334,7 +333,7 @@ System.out.println("echo int is NOT supp
     TestImpl1 impl = new TestImpl1();
     server = RPC.getServer(TestProtocol1.class, impl, ADDRESS, 0, 2, false,
         conf, null);
-    server.addProtocol(RpcKind.RPC_WRITABLE, TestProtocol0.class, impl);
+    server.addProtocol(RPC.RpcKind.RPC_WRITABLE, TestProtocol0.class, impl);
     server.start();
 
     ProtocolMetaInfoServerSideTranslatorPB xlator = 
@@ -343,13 +342,13 @@ System.out.println("echo int is NOT supp
     GetProtocolSignatureResponseProto resp = xlator.getProtocolSignature(
         null,
         createGetProtocolSigRequestProto(TestProtocol1.class,
-            RpcKind.RPC_PROTOCOL_BUFFER));
+            RPC.RpcKind.RPC_PROTOCOL_BUFFER));
     //No signatures should be found
     Assert.assertEquals(0, resp.getProtocolSignatureCount());
     resp = xlator.getProtocolSignature(
         null,
         createGetProtocolSigRequestProto(TestProtocol1.class,
-            RpcKind.RPC_WRITABLE));
+            RPC.RpcKind.RPC_WRITABLE));
     Assert.assertEquals(1, resp.getProtocolSignatureCount());
     ProtocolSignatureProto sig = resp.getProtocolSignatureList().get(0);
     Assert.assertEquals(TestProtocol1.versionID, sig.getVersion());
@@ -366,7 +365,7 @@ System.out.println("echo int is NOT supp
   }
   
   private GetProtocolSignatureRequestProto createGetProtocolSigRequestProto(
-      Class<?> protocol, RpcKind rpcKind) {
+      Class<?> protocol, RPC.RpcKind rpcKind) {
     GetProtocolSignatureRequestProto.Builder builder = 
         GetProtocolSignatureRequestProto.newBuilder();
     builder.setProtocol(protocol.getName());