Posted to common-commits@hadoop.apache.org by to...@apache.org on 2011/09/08 03:39:20 UTC

svn commit: r1166495 [2/2] - in /hadoop/common/branches/HDFS-1623/hadoop-common-project: ./ hadoop-auth-examples/ hadoop-auth-examples/src/ hadoop-auth-examples/src/main/ hadoop-auth-examples/src/main/java/ hadoop-auth-examples/src/main/java/org/ hadoo...

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WritableRpcEngine.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WritableRpcEngine.java?rev=1166495&r1=1166494&r2=1166495&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WritableRpcEngine.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WritableRpcEngine.java Thu Sep  8 01:39:07 2011
@@ -27,6 +27,9 @@ import java.lang.reflect.InvocationTarge
 
 import java.net.InetSocketAddress;
 import java.io.*;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
 import java.util.Map;
 import java.util.HashMap;
 
@@ -35,6 +38,7 @@ import javax.net.SocketFactory;
 import org.apache.commons.logging.*;
 
 import org.apache.hadoop.io.*;
+import org.apache.hadoop.ipc.VersionedProtocol;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.token.SecretManager;
 import org.apache.hadoop.security.token.TokenIdentifier;
@@ -47,10 +51,46 @@ import org.apache.hadoop.conf.*;
 public class WritableRpcEngine implements RpcEngine {
   private static final Log LOG = LogFactory.getLog(RPC.class);
   
+ 
+  /**
+   * Get all super-interfaces that extend VersionedProtocol.
+   * @param childInterfaces the interfaces whose ancestry is examined
+   * @return the super-interfaces that extend VersionedProtocol
+   */
+  private static Class<?>[] getSuperInterfaces(Class<?>[] childInterfaces) {
+    List<Class<?>> allInterfaces = new ArrayList<Class<?>>();
+
+    for (Class<?> childInterface : childInterfaces) {
+      if (VersionedProtocol.class.isAssignableFrom(childInterface)) {
+          allInterfaces.add(childInterface);
+          allInterfaces.addAll(
+              Arrays.asList(
+                  getSuperInterfaces(childInterface.getInterfaces())));
+      } else {
+        LOG.warn("Interface " + childInterface +
+              " ignored because it does not extend VersionedProtocol");
+      }
+    }
+    return (Class<?>[]) allInterfaces.toArray(new Class[allInterfaces.size()]);
+  }
+  
+  /**
+   * Get all interfaces that the given protocol implements or extends
+   * which are assignable from VersionedProtocol.
+   */
+  private static Class<?>[] getProtocolInterfaces(Class<?> protocol) {
+    Class<?>[] interfaces  = protocol.getInterfaces();
+    return getSuperInterfaces(interfaces);
+  }
+
+  
   //writableRpcVersion should be updated if there is a change
   //in format of the rpc messages.
-  public static long writableRpcVersion = 1L;
+  
+  // 2L - added declared class to Invocation
+  public static final long writableRpcVersion = 2L; 
 
+  
   /** A method invocation, including the method name and its parameters.*/
   private static class Invocation implements Writable, Configurable {
     private String methodName;
@@ -59,11 +99,13 @@ public class WritableRpcEngine implement
     private Configuration conf;
     private long clientVersion;
     private int clientMethodsHash;
+    private String declaringClassProtocolName;
     
     //This could be different from static writableRpcVersion when received
     //at server, if client is using a different version.
     private long rpcVersion;
 
+    @SuppressWarnings("unused") // called when deserializing an invocation
     public Invocation() {}
 
     public Invocation(Method method, Object[] parameters) {
@@ -88,6 +130,8 @@ public class WritableRpcEngine implement
         this.clientMethodsHash = ProtocolSignature.getFingerprint(method
             .getDeclaringClass().getMethods());
       }
+      this.declaringClassProtocolName = 
+          RPC.getProtocolName(method.getDeclaringClass());
     }
 
     /** The name of the method invoked. */
@@ -103,6 +147,7 @@ public class WritableRpcEngine implement
       return clientVersion;
     }
 
+    @SuppressWarnings("unused")
     private int getClientMethodsHash() {
       return clientMethodsHash;
     }
@@ -115,8 +160,10 @@ public class WritableRpcEngine implement
       return rpcVersion;
     }
 
+    @SuppressWarnings("deprecation")
     public void readFields(DataInput in) throws IOException {
       rpcVersion = in.readLong();
+      declaringClassProtocolName = UTF8.readString(in);
       methodName = UTF8.readString(in);
       clientVersion = in.readLong();
       clientMethodsHash = in.readInt();
@@ -124,13 +171,16 @@ public class WritableRpcEngine implement
       parameterClasses = new Class[parameters.length];
       ObjectWritable objectWritable = new ObjectWritable();
       for (int i = 0; i < parameters.length; i++) {
-        parameters[i] = ObjectWritable.readObject(in, objectWritable, this.conf);
+        parameters[i] = 
+            ObjectWritable.readObject(in, objectWritable, this.conf);
         parameterClasses[i] = objectWritable.getDeclaredClass();
       }
     }
 
+    @SuppressWarnings("deprecation")
     public void write(DataOutput out) throws IOException {
       out.writeLong(rpcVersion);
+      UTF8.writeString(out, declaringClassProtocolName);
       UTF8.writeString(out, methodName);
       out.writeLong(clientVersion);
       out.writeInt(clientMethodsHash);
@@ -273,30 +323,161 @@ public class WritableRpcEngine implement
 
   /** Construct a server for a protocol implementation instance listening on a
    * port and address. */
-  public Server getServer(Class<?> protocol,
-                          Object instance, String bindAddress, int port,
-                          int numHandlers, int numReaders, int queueSizePerHandler,
-                          boolean verbose, Configuration conf,
+  public RPC.Server getServer(Class<?> protocolClass,
+                      Object protocolImpl, String bindAddress, int port,
+                      int numHandlers, int numReaders, int queueSizePerHandler,
+                      boolean verbose, Configuration conf,
                       SecretManager<? extends TokenIdentifier> secretManager) 
     throws IOException {
-    return new Server(instance, conf, bindAddress, port, numHandlers, 
-        numReaders, queueSizePerHandler, verbose, secretManager);
+    return new Server(protocolClass, protocolImpl, conf, bindAddress, port,
+        numHandlers, numReaders, queueSizePerHandler, verbose, secretManager);
   }
 
+
   /** An RPC Server. */
   public static class Server extends RPC.Server {
-    private Object instance;
     private boolean verbose;
+    
+    /**
+     * The key (protocol name, version) in the protocol map.
+     */
+    static class ProtoNameVer {
+      final String protocol;
+      final long   version;
+      ProtoNameVer(String protocol, long ver) {
+        this.protocol = protocol;
+        this.version = ver;
+      }
+      @Override
+      public boolean equals(Object o) {
+        if (o == null) 
+          return false;
+        if (this == o) 
+          return true;
+        if (! (o instanceof ProtoNameVer))
+          return false;
+        ProtoNameVer pv = (ProtoNameVer) o;
+        return ((pv.protocol.equals(this.protocol)) && 
+            (pv.version == this.version));     
+      }
+      @Override
+      public int hashCode() {
+        return protocol.hashCode() * 37 + (int) version;    
+      }
+    }
+    
+    /**
+     * The value (protocol class, protocol impl) in the protocol map.
+     */
+    static class ProtoClassProtoImpl {
+      final Class<?> protocolClass;
+      final Object protocolImpl; 
+      ProtoClassProtoImpl(Class<?> protocolClass, Object protocolImpl) {
+        this.protocolClass = protocolClass;
+        this.protocolImpl = protocolImpl;
+      }
+    }
+    
+    private Map<ProtoNameVer, ProtoClassProtoImpl> protocolImplMap = 
+        new HashMap<ProtoNameVer, ProtoClassProtoImpl>(10);
+    
+    // Register protocol and its impl for rpc calls
+    private void registerProtocolAndImpl(Class<?> protocolClass, 
+        Object protocolImpl) throws IOException {
+      String protocolName = RPC.getProtocolName(protocolClass);
+      VersionedProtocol vp = (VersionedProtocol) protocolImpl;
+      long version;
+      try {
+        version = vp.getProtocolVersion(protocolName, 0);
+      } catch (Exception ex) {
+        LOG.warn("Protocol "  + protocolClass + 
+             " NOT registered as getProtocolVersion throws exception ");
+        return;
+      }
+      protocolImplMap.put(new ProtoNameVer(protocolName, version),
+          new ProtoClassProtoImpl(protocolClass, protocolImpl)); 
+      LOG.info("ProtocolImpl=" + protocolImpl.getClass().getName() + 
+          " protocolClass=" + protocolClass.getName() + " version=" + version);
+    }
+    
+    private static class VerProtocolImpl {
+      final long version;
+      final ProtoClassProtoImpl protocolTarget;
+      VerProtocolImpl(long ver, ProtoClassProtoImpl protocolTarget) {
+        this.version = ver;
+        this.protocolTarget = protocolTarget;
+      }
+    }
+    
+    
+    @SuppressWarnings("unused") // will be useful later.
+    private VerProtocolImpl[] getSupportedProtocolVersions(
+        String protocolName) {
+      VerProtocolImpl[] resultk = new VerProtocolImpl[protocolImplMap.size()];
+      int i = 0;
+      for (Map.Entry<ProtoNameVer, ProtoClassProtoImpl> pv :
+                                        protocolImplMap.entrySet()) {
+        if (pv.getKey().protocol.equals(protocolName)) {
+          resultk[i++] = 
+              new VerProtocolImpl(pv.getKey().version, pv.getValue());
+        }
+      }
+      if (i == 0) {
+        return null;
+      }
+      VerProtocolImpl[] result = new VerProtocolImpl[i];
+      System.arraycopy(resultk, 0, result, 0, i);
+      return result;
+    }
+    
+    private VerProtocolImpl getHighestSupportedProtocol(String protocolName) {    
+      Long highestVersion = 0L;
+      ProtoClassProtoImpl highest = null;
+      for (Map.Entry<ProtoNameVer, ProtoClassProtoImpl> pv : protocolImplMap
+          .entrySet()) {
+        if (pv.getKey().protocol.equals(protocolName)) {
+          if ((highest == null) || (pv.getKey().version > highestVersion)) {
+            highest = pv.getValue();
+            highestVersion = pv.getKey().version;
+          } 
+        }
+      }
+      if (highest == null) {
+        return null;
+      }
+      return new VerProtocolImpl(highestVersion, highest);
+    }
+ 
 
     /** Construct an RPC server.
      * @param instance the instance whose methods will be called
      * @param conf the configuration to use
      * @param bindAddress the address to bind on to listen for connection
      * @param port the port to listen for connections on
+     *
+     * @deprecated Use {@link #Server(Class, Object, Configuration, String, int)}
+     */
+    @Deprecated
+    public Server(Object instance, Configuration conf, String bindAddress,
+        int port) 
+      throws IOException {
+      this(null, instance, conf, bindAddress, port);
+    }
+    
+    
+    /** Construct an RPC server.
+     * @param protocolClass the protocol class
+     * @param protocolImpl the instance whose methods will be called
+     * @param conf the configuration to use
+     * @param bindAddress the address to bind on to listen for connection
+     * @param port the port to listen for connections on
      */
-    public Server(Object instance, Configuration conf, String bindAddress, int port) 
+    public Server(Class<?> protocolClass, Object protocolImpl, 
+        Configuration conf, String bindAddress, int port) 
       throws IOException {
-      this(instance, conf,  bindAddress, port, 1, -1, -1, false, null);
+      this(protocolClass, protocolImpl, conf, bindAddress, port, 1, -1, -1,
+          false, null);
     }
     
     private static String classNameBase(String className) {
@@ -307,35 +488,103 @@ public class WritableRpcEngine implement
       return names[names.length-1];
     }
     
+    
     /** Construct an RPC server.
-     * @param instance the instance whose methods will be called
+     * @param protocolImpl the instance whose methods will be called
+     * @param conf the configuration to use
+     * @param bindAddress the address to bind on to listen for connection
+     * @param port the port to listen for connections on
+     * @param numHandlers the number of method handler threads to run
+     * @param verbose whether each call should be logged
+     * 
+     * @deprecated use {@link #Server(Class, Object,
+     *      Configuration, String, int, int, int, int, boolean, SecretManager)}
+     */
+    @Deprecated
+    public Server(Object protocolImpl, Configuration conf, String bindAddress,
+        int port, int numHandlers, int numReaders, int queueSizePerHandler,
+        boolean verbose, SecretManager<? extends TokenIdentifier> secretManager) 
+            throws IOException {
+      this(null, protocolImpl, conf, bindAddress, port,
+          numHandlers, numReaders, queueSizePerHandler, verbose,
+          secretManager);
+    }
+    
+    /** Construct an RPC server.
+     * @param protocolClass - the protocol being registered
+     *     can be null for compatibility with old usage (see below for details)
+     * @param protocolImpl the protocol impl that will be called
      * @param conf the configuration to use
      * @param bindAddress the address to bind on to listen for connection
      * @param port the port to listen for connections on
      * @param numHandlers the number of method handler threads to run
      * @param verbose whether each call should be logged
      */
-    public Server(Object instance, Configuration conf, String bindAddress,  int port,
-                  int numHandlers, int numReaders, int queueSizePerHandler, boolean verbose, 
-                  SecretManager<? extends TokenIdentifier> secretManager) 
+    public Server(Class<?> protocolClass, Object protocolImpl,
+        Configuration conf, String bindAddress,  int port,
+        int numHandlers, int numReaders, int queueSizePerHandler, 
+        boolean verbose, SecretManager<? extends TokenIdentifier> secretManager) 
         throws IOException {
       super(bindAddress, port, Invocation.class, numHandlers, numReaders,
           queueSizePerHandler, conf,
-          classNameBase(instance.getClass().getName()), secretManager);
-      this.instance = instance;
+          classNameBase(protocolImpl.getClass().getName()), secretManager);
+
       this.verbose = verbose;
+      
+      
+      Class<?>[] protocols;
+      if (protocolClass == null) { // derive protocol from impl
+        /*
+         * In order to remain compatible with the old usage where a single
+         * target protocolImpl is supplied for all protocol interfaces, and
+         * the protocol interfaces are derived from the protocolImpl,
+         * we register all interfaces extended by the protocolImpl.
+         */
+        protocols = getProtocolInterfaces(protocolImpl.getClass());
+
+      } else {
+        if (!protocolClass.isAssignableFrom(protocolImpl.getClass())) {
+          throw new IOException("protocolClass "+ protocolClass +
+              " is not implemented by protocolImpl which is of class " +
+              protocolImpl.getClass());
+        }
+        // register protocol class and its super interfaces
+        registerProtocolAndImpl(protocolClass, protocolImpl);
+        protocols = getProtocolInterfaces(protocolClass);
+      }
+      for (Class<?> p : protocols) {
+        if (!p.equals(VersionedProtocol.class)) {
+          registerProtocolAndImpl(p, protocolImpl);
+        }
+      }
+
     }
 
-    public Writable call(Class<?> protocol, Writable param, long receivedTime) 
+ 
+    @Override
+    public <PROTO extends VersionedProtocol, IMPL extends PROTO>
+        Server addProtocol(Class<PROTO> protocolClass, IMPL protocolImpl)
+        throws IOException {
+      registerProtocolAndImpl(protocolClass, protocolImpl);
+      return this;
+    }
+    
+    /**
+     * Process a client call.
+     * @param protocolName the protocol name (the class of the client proxy
+     *      used to make calls to the rpc server)
+     * @param param the parameters of the call
+     * @param receivedTime the time at which the call was received (for metrics)
+     * @return the call's return value
+     * @throws IOException
+     */
+    public Writable call(String protocolName, Writable param, long receivedTime) 
     throws IOException {
       try {
         Invocation call = (Invocation)param;
         if (verbose) log("Call: " + call);
 
-        Method method = protocol.getMethod(call.getMethodName(),
-                                           call.getParameterClasses());
-        method.setAccessible(true);
-
         // Verify rpc version
         if (call.getRpcVersion() != writableRpcVersion) {
           // Client is using a different version of WritableRpc
@@ -344,25 +593,51 @@ public class WritableRpcEngine implement
                   + call.getRpcVersion() + ", server side version="
                   + writableRpcVersion);
         }
-        
-        //Verify protocol version.
-        //Bypass the version check for VersionedProtocol
-        if (!method.getDeclaringClass().equals(VersionedProtocol.class)) {
-          long clientVersion = call.getProtocolVersion();
-          ProtocolSignature serverInfo = ((VersionedProtocol) instance)
-              .getProtocolSignature(protocol.getCanonicalName(), call
-                  .getProtocolVersion(), call.getClientMethodsHash());
-          long serverVersion = serverInfo.getVersion();
-          if (serverVersion != clientVersion) {
-            LOG.warn("Version mismatch: client version=" + clientVersion
-                + ", server version=" + serverVersion);
-            throw new RPC.VersionMismatch(protocol.getName(), clientVersion,
-                serverVersion);
+
+        long clientVersion = call.getProtocolVersion();
+        final String protoName;
+        ProtoClassProtoImpl protocolImpl;
+        if (call.declaringClassProtocolName.equals(VersionedProtocol.class.getName())) {
+          // VersionedProtocol methods are often used by the client to figure
+          // out which version of a protocol to use.
+          //
+          // Versioned protocol methods should go to the protocolName protocol
+          // rather than the declaring class of the method, since the
+          // declaring class is VersionedProtocol, which is not
+          // registered directly.
+          // Send the call to the highest supported protocol version.
+          protocolImpl = 
+              getHighestSupportedProtocol(protocolName).protocolTarget;
+        } else {
+          protoName = call.declaringClassProtocolName;
+
+          // Find the right impl for the protocol based on client version.
+          ProtoNameVer pv = 
+              new ProtoNameVer(call.declaringClassProtocolName, clientVersion);
+          protocolImpl = protocolImplMap.get(pv);
+          if (protocolImpl == null) { // no match for Protocol AND Version
+             VerProtocolImpl highest = 
+                 getHighestSupportedProtocol(protoName);
+            if (highest == null) {
+              throw new IOException("Unknown protocol: " + protoName);
+            } else { // protocol supported but not the version that client wants
+              throw new RPC.VersionMismatch(protoName, clientVersion,
+                highest.version);
+            }
           }
         }
+        
+
+        // Invoke the protocol method
 
         long startTime = System.currentTimeMillis();
-        Object value = method.invoke(instance, call.getParameters());
+        Method method = 
+            protocolImpl.protocolClass.getMethod(call.getMethodName(),
+            call.getParameterClasses());
+        method.setAccessible(true);
+        rpcDetailedMetrics.init(protocolImpl.protocolClass);
+        Object value = 
+            method.invoke(protocolImpl.protocolImpl, call.getParameters());
         int processingTime = (int) (System.currentTimeMillis() - startTime);
         int qTime = (int) (startTime-receivedTime);
         if (LOG.isDebugEnabled()) {

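The WritableRpcEngine changes above let one RPC server host several protocols,
each keyed by (protocol name, version). A hedged usage sketch, assuming two
hypothetical VersionedProtocol sub-interfaces and impl objects fooImpl/barImpl;
only the Server constructor and addProtocol signatures from this patch are
relied on:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.ipc.VersionedProtocol;
    import org.apache.hadoop.ipc.WritableRpcEngine;

    interface FooProtocol extends VersionedProtocol { /* hypothetical */ }
    interface BarProtocol extends VersionedProtocol { /* hypothetical */ }

    // Register FooProtocol at construction, then chain BarProtocol onto the
    // same server; calls are routed via the declaringClassProtocolName that
    // Invocation now serializes.
    WritableRpcEngine.Server server =
        new WritableRpcEngine.Server(FooProtocol.class, fooImpl,
            new Configuration(), "0.0.0.0", 9000);
    server.addProtocol(BarProtocol.class, barImpl);
    server.start();
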
Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/ganglia/GangliaContext.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/ganglia/GangliaContext.java?rev=1166495&r1=1166494&r2=1166495&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/ganglia/GangliaContext.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/ganglia/GangliaContext.java Thu Sep  8 01:39:07 2011
@@ -132,6 +132,12 @@ public class GangliaContext extends Abst
     StringBuilder sb = new StringBuilder();
     sb.append(contextName);
     sb.append('.');
+
+    if (contextName.equals("jvm") && outRec.getTag("processName") != null) {
+      sb.append(outRec.getTag("processName"));
+      sb.append('.');
+    }
+
     sb.append(recordName);
     sb.append('.');
     int sbBaseLen = sb.length();

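With the GangliaContext change above, a "jvm"-context record that carries a
processName tag (say, processName=DataNode, a hypothetical value) is now
emitted as jvm.DataNode.<recordName>.<metricName> instead of
jvm.<recordName>.<metricName>, so several Hadoop daemons on one host report
distinct JVM metrics.
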
Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/ganglia/GangliaSink30.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/ganglia/GangliaSink30.java?rev=1166495&r1=1166494&r2=1166495&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/ganglia/GangliaSink30.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/ganglia/GangliaSink30.java Thu Sep  8 01:39:07 2011
@@ -20,13 +20,21 @@ package org.apache.hadoop.metrics2.sink.
 
 import java.io.IOException;
 import java.util.Collection;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
 import java.util.Map;
+import java.util.Set;
 
+import org.apache.commons.configuration.SubsetConfiguration;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.metrics2.AbstractMetric;
 import org.apache.hadoop.metrics2.MetricsException;
 import org.apache.hadoop.metrics2.MetricsRecord;
+import org.apache.hadoop.metrics2.MetricsTag;
+import org.apache.hadoop.metrics2.impl.MsInfo;
 import org.apache.hadoop.metrics2.util.MetricsCache;
 import org.apache.hadoop.metrics2.util.MetricsCache.Record;
 
@@ -38,8 +46,67 @@ public class GangliaSink30 extends Abstr
 
   public final Log LOG = LogFactory.getLog(this.getClass());
 
+  private static final String TAGS_FOR_PREFIX_PROPERTY_PREFIX = "tagsForPrefix.";
+  
   private MetricsCache metricsCache = new MetricsCache();
 
+  // a key with a NULL value means ALL
+  private Map<String,Set<String>> useTagsMap = new HashMap<String,Set<String>>();
+
+  @Override
+  @SuppressWarnings("unchecked")
+  public void init(SubsetConfiguration conf) {
+    super.init(conf);
+
+    conf.setListDelimiter(',');
+    Iterator<String> it = (Iterator<String>) conf.getKeys();
+    while (it.hasNext()) {
+      String propertyName = it.next();
+      if (propertyName.startsWith(TAGS_FOR_PREFIX_PROPERTY_PREFIX)) {
+        String contextName = propertyName.substring(TAGS_FOR_PREFIX_PROPERTY_PREFIX.length());
+        String[] tags = conf.getStringArray(propertyName);
+        boolean useAllTags = false;
+        Set<String> set = null;
+        if (tags.length > 0) {
+          set = new HashSet<String>();
+          for (String tag : tags) {
+            tag = tag.trim();
+            useAllTags |= tag.equals("*");
+            if (tag.length() > 0) {
+              set.add(tag);
+            }
+          }
+          if (useAllTags) {
+            set = null;
+          }
+        }
+        useTagsMap.put(contextName, set);
+      }
+    }
+  }
+
+  @InterfaceAudience.Private
+  public void appendPrefix(MetricsRecord record, StringBuilder sb) {
+    String contextName = record.context();
+    Collection<MetricsTag> tags = record.tags();
+    if (useTagsMap.containsKey(contextName)) {
+      Set<String> useTags = useTagsMap.get(contextName);
+      for (MetricsTag t : tags) {
+        if (useTags == null || useTags.contains(t.name())) {
+
+          // the context is always skipped here because it is always added
+
+          // the hostname is always skipped to avoid case mismatches
+          // from different DNS servers.
+
+          if (t.info() != MsInfo.Context && t.info() != MsInfo.Hostname && t.value() != null) {
+            sb.append('.').append(t.name()).append('=').append(t.value());
+          }
+        }
+      }
+    }          
+  }
+  
   @Override
   public void putMetrics(MetricsRecord record) {
     // The method handles both cases whether Ganglia support dense publish
@@ -53,6 +120,8 @@ public class GangliaSink30 extends Abstr
       sb.append('.');
       sb.append(recordName);
 
+      appendPrefix(record, sb);
+      
       String groupName = sb.toString();
       sb.append('.');
       int sbBaseLen = sb.length();

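A hedged configuration sketch for the new tagsForPrefix support (the sink
instance name "ganglia" and the tag/context names are illustrative; only the
"tagsForPrefix." option prefix comes from this patch):

    # in hadoop-metrics2.properties: for records in the "jvm" context, append
    # the ProcessName tag (as ".ProcessName=<value>") to the metric prefix
    *.sink.ganglia.tagsForPrefix.jvm=ProcessName
    # "*" appends every tag of the record, except Context and Hostname,
    # which appendPrefix always skips
    *.sink.ganglia.tagsForPrefix.rpc=*
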
Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/AuthenticationFilterInitializer.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/AuthenticationFilterInitializer.java?rev=1166495&r1=1166494&r2=1166495&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/AuthenticationFilterInitializer.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/AuthenticationFilterInitializer.java Thu Sep  8 01:39:07 2011
@@ -17,7 +17,7 @@
  */
 package org.apache.hadoop.security;
 
-import org.apache.hadoop.alfredo.server.AuthenticationFilter;
+import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.http.FilterContainer;
 import org.apache.hadoop.http.FilterInitializer;

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/HadoopKerberosName.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/HadoopKerberosName.java?rev=1166495&r1=1166494&r2=1166495&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/HadoopKerberosName.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/HadoopKerberosName.java Thu Sep  8 01:39:07 2011
@@ -23,7 +23,7 @@ import java.io.IOException;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.alfredo.util.KerberosName;
+import org.apache.hadoop.security.authentication.util.KerberosName;
 
 import sun.security.krb5.Config;
 import sun.security.krb5.KrbException;

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/RunJar.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/RunJar.java?rev=1166495&r1=1166494&r2=1166495&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/RunJar.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/RunJar.java Thu Sep  8 01:39:07 2011
@@ -158,10 +158,7 @@ public class RunJar {
 
     Runtime.getRuntime().addShutdownHook(new Thread() {
         public void run() {
-          try {
-            FileUtil.fullyDelete(workDir);
-          } catch (IOException e) {
-          }
+          FileUtil.fullyDelete(workDir);
         }
       });
 

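FileUtil.fullyDelete(File) no longer declares a checked IOException, so the
shutdown hook above can call it directly; the empty catch block is gone.
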
Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml?rev=1166495&r1=1166494&r2=1166495&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml Thu Sep  8 01:39:07 2011
@@ -318,6 +318,11 @@
 </property>
 
 <property>
+  <name>fs.webhdfs.impl</name>
+  <value>org.apache.hadoop.hdfs.web.WebHdfsFileSystem</value>
+</property>
+
+<property>
   <name>fs.ftp.impl</name>
   <value>org.apache.hadoop.fs.ftp.FTPFileSystem</value>
   <description>The FileSystem for ftp: uris.</description>

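A hedged sketch of what the new fs.webhdfs.impl mapping enables (the namenode
host and port are placeholders):

    import java.net.URI;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;

    // FileSystem.get consults fs.<scheme>.impl, so a webhdfs:// URI now
    // resolves to WebHdfsFileSystem without any per-site configuration.
    FileSystem fs = FileSystem.get(
        URI.create("webhdfs://namenode.example.com:50070/"), new Configuration());
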
Propchange: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/core/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Thu Sep  8 01:39:07 2011
@@ -1,3 +1,3 @@
-/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/core:1152502-1162221
+/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/core:1152502-1166484
 /hadoop/core/branches/branch-0.19/core/src/test/core:713112
 /hadoop/core/trunk/src/test/core:776175-785643,785929-786278

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FSMainOperationsBaseTest.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FSMainOperationsBaseTest.java?rev=1166495&r1=1166494&r2=1166495&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FSMainOperationsBaseTest.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FSMainOperationsBaseTest.java Thu Sep  8 01:39:07 2011
@@ -32,6 +32,7 @@ import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
+import org.mortbay.log.Log;
 
 import static org.apache.hadoop.fs.FileSystemTestHelper.*;
 
@@ -62,8 +63,6 @@ public abstract class FSMainOperationsBa
   private static String TEST_DIR_AXX = "test/hadoop/axx";
   private static int numBlocks = 2;
   
-  static  final String LOCAL_FS_ROOT_URI = "file:///tmp/test";
-  
   
   protected static FileSystem fSys;
   
@@ -83,7 +82,7 @@ public abstract class FSMainOperationsBa
     }     
   };
   
-  private static byte[] data = getFileData(numBlocks,
+  protected static final byte[] data = getFileData(numBlocks,
       getDefaultBlockSize());
   
   @Before
@@ -183,7 +182,7 @@ public abstract class FSMainOperationsBa
   
   @Test
   public void testWDAbsolute() throws IOException {
-    Path absoluteDir = new Path(LOCAL_FS_ROOT_URI + "/existingDir");
+    Path absoluteDir = new Path(fSys.getUri() + "/test/existingDir");
     fSys.mkdirs(absoluteDir);
     fSys.setWorkingDirectory(absoluteDir);
     Assert.assertEquals(absoluteDir, fSys.getWorkingDirectory());
@@ -646,7 +645,7 @@ public abstract class FSMainOperationsBa
     writeReadAndDelete(getDefaultBlockSize() * 2);
   }
   
-  private void writeReadAndDelete(int len) throws IOException {
+  protected void writeReadAndDelete(int len) throws IOException {
     Path path = getTestRootPath(fSys, "test/hadoop/file");
     
     fSys.mkdirs(path.getParent());
@@ -768,6 +767,7 @@ public abstract class FSMainOperationsBa
       rename(src, dst, false, false, false, Rename.NONE);
       Assert.fail("Should throw FileNotFoundException");
     } catch (IOException e) {
+      Log.info("XXX", e);
       Assert.assertTrue(unwrapException(e) instanceof FileNotFoundException);
     }
 

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemContractBaseTest.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemContractBaseTest.java?rev=1166495&r1=1166494&r2=1166495&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemContractBaseTest.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemContractBaseTest.java Thu Sep  8 01:39:07 2011
@@ -45,7 +45,7 @@ import org.apache.hadoop.fs.Path;
 public abstract class FileSystemContractBaseTest extends TestCase {
   
   protected FileSystem fs;
-  private byte[] data = new byte[getBlockSize() * 2]; // two blocks of data
+  protected byte[] data = new byte[getBlockSize() * 2]; // two blocks of data
   {
     for (int i = 0; i < data.length; i++) {
       data[i] = (byte) (i % 10);
@@ -215,7 +215,7 @@ public abstract class FileSystemContract
     writeReadAndDelete(getBlockSize() * 2);
   }
   
-  private void writeReadAndDelete(int len) throws IOException {
+  protected void writeReadAndDelete(int len) throws IOException {
     Path path = path("/test/hadoop/file");
     
     fs.mkdirs(path.getParent());
@@ -256,7 +256,7 @@ public abstract class FileSystemContract
     assertEquals("Length", data.length, fs.getFileStatus(path).getLen());
     
     try {
-      fs.create(path, false);
+      fs.create(path, false).close();
       fail("Should throw IOException.");
     } catch (IOException e) {
       // Expected

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemTestHelper.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemTestHelper.java?rev=1166495&r1=1166494&r2=1166495&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemTestHelper.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemTestHelper.java Thu Sep  8 01:39:07 2011
@@ -17,16 +17,15 @@
  */
 package org.apache.hadoop.fs;
 
-import java.io.DataInputStream;
 import java.io.IOException;
 import java.io.FileNotFoundException;
 import java.net.URI;
+import java.util.Random;
 
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.io.IOUtils;
 import org.junit.Assert;
-
+import static org.junit.Assert.*;
 
 /**
  * Helper class for unit tests.
@@ -143,23 +142,33 @@ public final class FileSystemTestHelper 
     }
   }
   
-  
-  public static void writeFile(FileSystem fSys, Path path,byte b[])
-    throws Exception {
-    FSDataOutputStream out = 
-      fSys.create(path);
-    out.write(b);
-    out.close();
+  static String writeFile(FileSystem fileSys, Path name, int fileSize)
+    throws IOException {
+    final long seed = 0xDEADBEEFL;
+    // Create and write a file of the given size, filled with random data
+    FSDataOutputStream stm = fileSys.create(name);
+    byte[] buffer = new byte[fileSize];
+    Random rand = new Random(seed);
+    rand.nextBytes(buffer);
+    stm.write(buffer);
+    stm.close();
+    return new String(buffer);
   }
   
-  public static byte[] readFile(FileSystem fSys, Path path, int len )
-    throws Exception {
-    DataInputStream dis = fSys.open(path);
-    byte[] buffer = new byte[len];
-    IOUtils.readFully(dis, buffer, 0, len);
-    dis.close();
-    return buffer;
+  static String readFile(FileSystem fs, Path name, int buflen) 
+    throws IOException {
+    byte[] b = new byte[buflen];
+    int offset = 0;
+    FSDataInputStream in = fs.open(name);
+    for (int remaining, n;
+        (remaining = b.length - offset) > 0 && (n = in.read(b, offset, remaining)) != -1;
+        offset += n); 
+    assertEquals(offset, Math.min(b.length, in.getPos()));
+    in.close();
+    String s = new String(b, 0, offset);
+    return s;
   }
+
   public static FileStatus containsPath(FileSystem fSys, Path path,
       FileStatus[] dirList)
     throws IOException {

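A hedged sketch of the new FileSystemTestHelper round trip (fs and path are
placeholders; both helpers are package-private, so callers must live in
org.apache.hadoop.fs). writeFile returns the random contents it wrote, which
can be checked against what readFile returns:

    String written = FileSystemTestHelper.writeFile(fs, path, 1024);
    String read = FileSystemTestHelper.readFile(fs, path, 1024);
    assertEquals(written, read);
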
Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestChecksumFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestChecksumFileSystem.java?rev=1166495&r1=1166494&r2=1166495&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestChecksumFileSystem.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestChecksumFileSystem.java Thu Sep  8 01:39:07 2011
@@ -18,10 +18,9 @@
 
 package org.apache.hadoop.fs;
 
-import java.net.URI;
-
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FSDataOutputStream;
+import static org.apache.hadoop.fs.FileSystemTestHelper.*;
 import org.apache.hadoop.conf.Configuration;
 import junit.framework.TestCase;
 
@@ -56,13 +55,13 @@ public class TestChecksumFileSystem exte
 
     // Exercise some boundary cases - a divisor of the chunk size
     // the chunk size, 2x chunk size, and +/-1 around these.
-    TestLocalFileSystem.readFile(localFs, testPath, 128);
-    TestLocalFileSystem.readFile(localFs, testPath, 511);
-    TestLocalFileSystem.readFile(localFs, testPath, 512);
-    TestLocalFileSystem.readFile(localFs, testPath, 513);
-    TestLocalFileSystem.readFile(localFs, testPath, 1023);
-    TestLocalFileSystem.readFile(localFs, testPath, 1024);
-    TestLocalFileSystem.readFile(localFs, testPath, 1025);
+    readFile(localFs, testPath, 128);
+    readFile(localFs, testPath, 511);
+    readFile(localFs, testPath, 512);
+    readFile(localFs, testPath, 513);
+    readFile(localFs, testPath, 1023);
+    readFile(localFs, testPath, 1024);
+    readFile(localFs, testPath, 1025);
 
     localFs.delete(localFs.getChecksumFile(testPath), true);
     assertTrue("checksum deleted", !localFs.exists(localFs.getChecksumFile(testPath)));
@@ -74,7 +73,7 @@ public class TestChecksumFileSystem exte
     
     boolean errorRead = false;
     try {
-      TestLocalFileSystem.readFile(localFs, testPath, 1024);
+      readFile(localFs, testPath, 1024);
     }catch(ChecksumException ie) {
       errorRead = true;
     }
@@ -83,7 +82,7 @@ public class TestChecksumFileSystem exte
     //now setting verify false, the read should succeed
     try {
       localFs.setVerifyChecksum(false);
-      String str = TestLocalFileSystem.readFile(localFs, testPath, 1024);
+      String str = readFile(localFs, testPath, 1024).toString();
       assertTrue("read", "testing".equals(str));
     } finally {
       // reset for other tests
@@ -104,13 +103,13 @@ public class TestChecksumFileSystem exte
 
     // Exercise some boundary cases - a divisor of the chunk size
     // the chunk size, 2x chunk size, and +/-1 around these.
-    TestLocalFileSystem.readFile(localFs, testPath, 128);
-    TestLocalFileSystem.readFile(localFs, testPath, 511);
-    TestLocalFileSystem.readFile(localFs, testPath, 512);
-    TestLocalFileSystem.readFile(localFs, testPath, 513);
-    TestLocalFileSystem.readFile(localFs, testPath, 1023);
-    TestLocalFileSystem.readFile(localFs, testPath, 1024);
-    TestLocalFileSystem.readFile(localFs, testPath, 1025);
+    readFile(localFs, testPath, 128);
+    readFile(localFs, testPath, 511);
+    readFile(localFs, testPath, 512);
+    readFile(localFs, testPath, 513);
+    readFile(localFs, testPath, 1023);
+    readFile(localFs, testPath, 1024);
+    readFile(localFs, testPath, 1025);
   }
 
   /**
@@ -140,7 +139,7 @@ public class TestChecksumFileSystem exte
 
     // Now reading the file should fail with a ChecksumException
     try {
-      TestLocalFileSystem.readFile(localFs, testPath, 1024);
+      readFile(localFs, testPath, 1024);
       fail("Did not throw a ChecksumException when reading truncated " +
            "crc file");
     } catch(ChecksumException ie) {
@@ -149,7 +148,7 @@ public class TestChecksumFileSystem exte
     // telling it not to verify checksums, should avoid issue.
     try {
       localFs.setVerifyChecksum(false);
-      String str = TestLocalFileSystem.readFile(localFs, testPath, 1024);
+      String str = readFile(localFs, testPath, 1024).toString();
       assertTrue("read", "testing truncation".equals(str));
     } finally {
       // reset for other tests

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDU.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDU.java?rev=1166495&r1=1166494&r2=1166495&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDU.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDU.java Thu Sep  8 01:39:07 2011
@@ -29,7 +29,7 @@ public class TestDU extends TestCase {
   final static private File DU_DIR = new File(
       System.getProperty("test.build.data","/tmp"), "dutmp");
 
-  public void setUp() throws IOException {
+  public void setUp() {
       FileUtil.fullyDelete(DU_DIR);
       assertTrue(DU_DIR.mkdirs());
   }

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHardLink.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHardLink.java?rev=1166495&r1=1166494&r2=1166495&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHardLink.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHardLink.java Thu Sep  8 01:39:07 2011
@@ -98,7 +98,7 @@ public class TestHardLink {
    * @throws IOException
    */
   @BeforeClass
-  public static void setupClean() throws IOException {
+  public static void setupClean() {
     //delete source and target directories if they exist
     FileUtil.fullyDelete(src);
     FileUtil.fullyDelete(tgt_one);

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalDirAllocator.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalDirAllocator.java?rev=1166495&r1=1166494&r2=1166495&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalDirAllocator.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalDirAllocator.java Thu Sep  8 01:39:07 2011
@@ -18,6 +18,7 @@
 package org.apache.hadoop.fs;
 
 import java.io.File;
+import java.io.FileNotFoundException;
 import java.io.IOException;
 
 import org.apache.hadoop.conf.Configuration;
@@ -208,4 +209,33 @@ public class TestLocalDirAllocator exten
     }
   }
   
+  /** Two buffer dirs. The first dir does not exist & is on a read-only disk;
+   * the second dir exists & is RW.
+   * getLocalPathForWrite with checkAccess true (the default) should create
+   * the parent directory; with checkAccess false, the directory should not
+   * be created.
+   * @throws IOException
+   */
+  public void testLocalPathForWriteDirCreation() throws IOException {
+    try {
+      conf.set(CONTEXT, BUFFER_DIR[0] + "," + BUFFER_DIR[1]);
+      assertTrue(localFs.mkdirs(BUFFER_PATH[1]));
+      BUFFER_ROOT.setReadOnly();
+      Path p1 =
+          dirAllocator.getLocalPathForWrite("p1/x", SMALL_FILE_SIZE, conf);
+      assertTrue(localFs.getFileStatus(p1.getParent()).isDirectory());
+
+      Path p2 =
+          dirAllocator.getLocalPathForWrite("p2/x", SMALL_FILE_SIZE, conf,
+              false);
+      try {
+        localFs.getFileStatus(p2.getParent());
+      } catch (Exception e) {
+        assertEquals(e.getClass(), FileNotFoundException.class);
+      }
+    } finally {
+      Shell.execCommand(new String[] { "chmod", "u+w", BUFFER_DIR_ROOT });
+      rmBufferDirs();
+    }
+  }
+  
 }

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystem.java?rev=1166495&r1=1166494&r2=1166495&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystem.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystem.java Thu Sep  8 01:39:07 2011
@@ -18,37 +18,23 @@
 package org.apache.hadoop.fs;
 
 import org.apache.hadoop.conf.Configuration;
+import static org.apache.hadoop.fs.FileSystemTestHelper.*;
+
 import java.io.*;
-import junit.framework.*;
+
+import static org.junit.Assert.*;
+import org.junit.Before;
+import org.junit.Test;
 
 /**
  * This class tests the local file system via the FileSystem abstraction.
  */
-public class TestLocalFileSystem extends TestCase {
+public class TestLocalFileSystem {
   private static String TEST_ROOT_DIR
     = System.getProperty("test.build.data","build/test/data/work-dir/localfs");
 
-
-  static void writeFile(FileSystem fs, Path name) throws IOException {
-    FSDataOutputStream stm = fs.create(name);
-    stm.writeBytes("42\n");
-    stm.close();
-  }
-  
-  static String readFile(FileSystem fs, Path name, int buflen) throws IOException {
-    byte[] b = new byte[buflen];
-    int offset = 0;
-    FSDataInputStream in = fs.open(name);
-    for(int remaining, n;
-        (remaining = b.length - offset) > 0 && (n = in.read(b, offset, remaining)) != -1;
-        offset += n); 
-    assertEquals(offset, Math.min(b.length, in.getPos()));
-    in.close();
-
-    String s = new String(b, 0, offset);
-    System.out.println("s=" + s);
-    return s;
-  }
+  private Configuration conf;
+  private FileSystem fileSys;
 
   private void cleanupFile(FileSystem fs, Path name) throws IOException {
     assertTrue(fs.exists(name));
@@ -56,12 +42,18 @@ public class TestLocalFileSystem extends
     assertTrue(!fs.exists(name));
   }
   
+  @Before
+  public void setup() throws IOException {
+    conf = new Configuration();
+    fileSys = FileSystem.getLocal(conf);
+    fileSys.delete(new Path(TEST_ROOT_DIR), true);
+  }
+
   /**
    * Test the capability of setting the working directory.
    */
+  @Test
   public void testWorkingDirectory() throws IOException {
-    Configuration conf = new Configuration();
-    FileSystem fileSys = FileSystem.getLocal(conf);
     Path origDir = fileSys.getWorkingDirectory();
     Path subdir = new Path(TEST_ROOT_DIR, "new");
     try {
@@ -85,7 +77,7 @@ public class TestLocalFileSystem extends
       // create files and manipulate them.
       Path file1 = new Path("file1");
       Path file2 = new Path("sub/file2");
-      writeFile(fileSys, file1);
+      String contents = writeFile(fileSys, file1, 1);
       fileSys.copyFromLocalFile(file1, file2);
       assertTrue(fileSys.exists(file1));
       assertTrue(fileSys.isFile(file1));
@@ -103,11 +95,10 @@ public class TestLocalFileSystem extends
       InputStream stm = fileSys.open(file1);
       byte[] buffer = new byte[3];
       int bytesRead = stm.read(buffer, 0, 3);
-      assertEquals("42\n", new String(buffer, 0, bytesRead));
+      assertEquals(contents, new String(buffer, 0, bytesRead));
       stm.close();
     } finally {
       fileSys.setWorkingDirectory(origDir);
-      fileSys.delete(subdir, true);
     }
   }
 
@@ -115,6 +106,7 @@ public class TestLocalFileSystem extends
    * test Syncable interface on raw local file system
    * @throws IOException
    */
+  @Test
   public void testSyncable() throws IOException {
     Configuration conf = new Configuration();
     FileSystem fs = FileSystem.getLocal(conf).getRawFileSystem();
@@ -148,12 +140,13 @@ public class TestLocalFileSystem extends
     }
   }
   
+  @Test
   public void testCopy() throws IOException {
     Configuration conf = new Configuration();
     LocalFileSystem fs = FileSystem.getLocal(conf);
     Path src = new Path(TEST_ROOT_DIR, "dingo");
     Path dst = new Path(TEST_ROOT_DIR, "yak");
-    writeFile(fs, src);
+    writeFile(fs, src, 1);
     assertTrue(FileUtil.copy(fs, src, fs, dst, true, false, conf));
     assertTrue(!fs.exists(src) && fs.exists(dst));
     assertTrue(FileUtil.copy(fs, dst, fs, src, false, false, conf));
@@ -170,9 +163,12 @@ public class TestLocalFileSystem extends
     try {
       FileUtil.copy(fs, dst, fs, src, true, true, conf);
       fail("Failed to detect existing dir");
-    } catch (IOException e) { }
+    } catch (IOException e) {
+      // Expected
+    }
   }
 
+  @Test
   public void testHomeDirectory() throws IOException {
     Configuration conf = new Configuration();
     FileSystem fileSys = FileSystem.getLocal(conf);
@@ -182,16 +178,18 @@ public class TestLocalFileSystem extends
     assertEquals(home, fsHome);
   }
 
+  @Test
   public void testPathEscapes() throws IOException {
     Configuration conf = new Configuration();
     FileSystem fs = FileSystem.getLocal(conf);
     Path path = new Path(TEST_ROOT_DIR, "foo%bar");
-    writeFile(fs, path);
+    writeFile(fs, path, 1);
     FileStatus status = fs.getFileStatus(path);
     assertEquals(path.makeQualified(fs), status.getPath());
     cleanupFile(fs, path);
   }
   
+  @Test
   public void testMkdirs() throws IOException {
     Configuration conf = new Configuration();
     LocalFileSystem fs = FileSystem.getLocal(conf);
@@ -199,18 +197,40 @@ public class TestLocalFileSystem extends
     Path test_file = new Path(TEST_ROOT_DIR, "file1");
     assertTrue(fs.mkdirs(test_dir));
    
-    writeFile(fs, test_file);
+    writeFile(fs, test_file, 1);
     // creating dir over a file
     Path bad_dir = new Path(test_file, "another_dir");
     
     try {
       fs.mkdirs(bad_dir);
       fail("Failed to detect existing file in path");
-    } catch (FileAlreadyExistsException e) { }
+    } catch (FileAlreadyExistsException e) { 
+      // Expected
+    }
     
     try {
       fs.mkdirs(null);
       fail("Failed to detect null in mkdir arg");
-    } catch (IllegalArgumentException e) { }
+    } catch (IllegalArgumentException e) {
+      // Expected
+    }
+  }
+
+  /** Test deleting a file, directory, and non-existent path */
+  @Test
+  public void testBasicDelete() throws IOException {
+    Configuration conf = new Configuration();
+    LocalFileSystem fs = FileSystem.getLocal(conf);
+    Path dir1 = new Path(TEST_ROOT_DIR, "dir1");
+    Path file1 = new Path(TEST_ROOT_DIR, "file1");
+    Path file2 = new Path(TEST_ROOT_DIR+"/dir1", "file2");
+    Path file3 = new Path(TEST_ROOT_DIR, "does-not-exist");
+    assertTrue(fs.mkdirs(dir1));
+    writeFile(fs, file1, 1);
+    writeFile(fs, file2, 1);
+    assertFalse("Returned true deleting non-existant path", 
+        fs.delete(file3));
+    assertTrue("Did not delete file", fs.delete(file1));
+    assertTrue("Did not delete non-empty dir", fs.delete(dir1));
   }
 }

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestTrash.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestTrash.java?rev=1166495&r1=1166494&r2=1166495&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestTrash.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestTrash.java Thu Sep  8 01:39:07 2011
@@ -19,9 +19,9 @@ package org.apache.hadoop.fs;
 
 
 import static org.apache.hadoop.fs.CommonConfigurationKeys.*;
+import static org.apache.hadoop.fs.FileSystemTestHelper.*;
 
 import java.io.ByteArrayOutputStream;
-import java.io.DataOutputStream;
 import java.io.File;
 import java.io.IOException;
 import java.io.PrintStream;
@@ -42,14 +42,6 @@ public class TestTrash extends TestCase 
     new Path(new File(System.getProperty("test.build.data","/tmp")
           ).toURI().toString().replace(' ', '+'), "testTrash");
 
-  protected static Path writeFile(FileSystem fs, Path f) throws IOException {
-    DataOutputStream out = fs.create(f);
-    out.writeBytes("dhruba: " + f);
-    out.close();
-    assertTrue(fs.exists(f));
-    return f;
-  }
-
   protected static Path mkdir(FileSystem fs, Path p) throws IOException {
     assertTrue(fs.mkdirs(p));
     assertTrue(fs.exists(p));
@@ -139,7 +131,7 @@ public class TestTrash extends TestCase 
 
     // Second, create a file in that directory.
     Path myFile = new Path(base, "test/mkdirs/myFile");
-    writeFile(fs, myFile);
+    writeFile(fs, myFile, 10);
 
     // Verify that expunge without Trash directory
     // won't throw Exception
@@ -176,7 +168,7 @@ public class TestTrash extends TestCase 
     }
 
     // Verify that we can recreate the file
-    writeFile(fs, myFile);
+    writeFile(fs, myFile, 10);
 
     // Verify that we succeed in removing the file we re-created
     {
@@ -194,7 +186,7 @@ public class TestTrash extends TestCase 
     }
 
     // Verify that we can recreate the file
-    writeFile(fs, myFile);
+    writeFile(fs, myFile, 10);
     
     // Verify that we succeed in removing the whole directory
     // along with the file inside it.
@@ -234,7 +226,7 @@ public class TestTrash extends TestCase 
     {
         Path toErase = new Path(trashRoot, "toErase");
         int retVal = -1;
-        writeFile(trashRootFs, toErase);
+        writeFile(trashRootFs, toErase, 10);
         try {
           retVal = shell.run(new String[] {"-rm", toErase.toString()});
         } catch (Exception e) {
@@ -265,7 +257,7 @@ public class TestTrash extends TestCase 
 
     // recreate directory and file
     mkdir(fs, myPath);
-    writeFile(fs, myFile);
+    writeFile(fs, myFile, 10);
 
     // remove file first, then remove directory
     {
@@ -316,7 +308,7 @@ public class TestTrash extends TestCase 
     
     // recreate directory and file
     mkdir(fs, myPath);
-    writeFile(fs, myFile);
+    writeFile(fs, myFile, 10);
     
     // Verify that skip trash option really skips the trash for files (rm)
     {
@@ -346,7 +338,7 @@ public class TestTrash extends TestCase 
     
     // recreate directory and file
     mkdir(fs, myPath);
-    writeFile(fs, myFile);
+    writeFile(fs, myFile, 10);
     
     // Verify that skip trash option really skips the trash for rmr
     {
@@ -392,7 +384,7 @@ public class TestTrash extends TestCase 
       for(int i=0;i<num_runs; i++) {
         
         //create file
-        writeFile(fs, myFile);
+        writeFile(fs, myFile, 10);
          
         // delete file
         try {
@@ -452,8 +444,7 @@ public class TestTrash extends TestCase 
         lfs.delete(p, true);
       }
       try {
-        f = writeFile(lfs, f);
-
+        writeFile(lfs, f, 10);
         FileSystem.closeAll();
         FileSystem localFs = FileSystem.get(URI.create("file:///"), conf);
         Trash lTrash = new Trash(localFs, conf);
@@ -515,7 +506,7 @@ public class TestTrash extends TestCase 
     while (true)  {
       // Create a file with a new name
       Path myFile = new Path(TEST_DIR, "test/mkdirs/myFile" + fileIndex++);
-      writeFile(fs, myFile);
+      writeFile(fs, myFile, 10);
 
       // Delete the file to trash
       String[] args = new String[2];
@@ -606,7 +597,7 @@ public class TestTrash extends TestCase 
     int iters = 1000;
     for(int i=0;i<iters; i++) {
       
-      writeFile(fs, myFile);
+      writeFile(fs, myFile, 10);
       
       start = System.currentTimeMillis();
       

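The TestTrash hunks above drop the class-local writeFile(fs, f) in favor of the shared FileSystemTestHelper.writeFile(fs, f, fileSize) pulled in by the new static import. A sketch of what such a helper can look like, assuming the third argument is a byte count (the actual FileSystemTestHelper implementation is not shown in this excerpt and may differ):

    import java.io.IOException;
    import org.apache.hadoop.fs.FSDataOutputStream;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public final class WriteFileSketch {
      /** Create f and fill it with fileSize arbitrary bytes. */
      public static void writeFile(FileSystem fs, Path f, int fileSize)
          throws IOException {
        FSDataOutputStream out = fs.create(f);
        try {
          out.write(new byte[fileSize]);  // content is irrelevant to the tests
        } finally {
          out.close();
        }
      }
    }

Centralizing the helper also lets each test control the file size, which the old fixed-string writer could not.
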
Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServer.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServer.java?rev=1166495&r1=1166494&r2=1166495&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServer.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServer.java Thu Sep  8 01:39:07 2011
@@ -18,9 +18,7 @@
 package org.apache.hadoop.http;
 
 import java.io.IOException;
-import java.io.PrintStream;
 import java.io.PrintWriter;
-import java.net.URLConnection;
 import java.net.HttpURLConnection;
 import java.net.URL;
 import java.util.Arrays;
@@ -52,6 +50,7 @@ import org.apache.commons.logging.LogFac
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.http.HttpServer.QuotingInputFilter.RequestQuoter;
+import org.apache.hadoop.http.resource.JerseyResource;
 import org.apache.hadoop.security.Groups;
 import org.apache.hadoop.security.ShellBasedUnixGroupsMapping;
 import org.apache.hadoop.security.authorize.AccessControlList;
@@ -59,6 +58,7 @@ import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
 import org.mockito.Mockito;
+import org.mortbay.util.ajax.JSON;
 
 public class TestHttpServer extends HttpServerFunctionalTest {
   static final Log LOG = LogFactory.getLog(TestHttpServer.class);
@@ -75,7 +75,7 @@ public class TestHttpServer extends Http
                       ) throws ServletException, IOException {
       PrintWriter out = response.getWriter();
       Map<String, String[]> params = request.getParameterMap();
-      SortedSet<String> keys = new TreeSet(params.keySet());
+      SortedSet<String> keys = new TreeSet<String>(params.keySet());
       for(String key: keys) {
         out.print(key);
         out.print(':');
@@ -101,7 +101,7 @@ public class TestHttpServer extends Http
                       HttpServletResponse response
                       ) throws ServletException, IOException {
       PrintWriter out = response.getWriter();
-      SortedSet<String> sortedKeys = new TreeSet();
+      SortedSet<String> sortedKeys = new TreeSet<String>();
       Enumeration<String> keys = request.getParameterNames();
       while(keys.hasMoreElements()) {
         sortedKeys.add(keys.nextElement());
@@ -118,7 +118,6 @@ public class TestHttpServer extends Http
 
   @SuppressWarnings("serial")
   public static class HtmlContentServlet extends HttpServlet {
-    @SuppressWarnings("unchecked")
     @Override
     public void doGet(HttpServletRequest request, 
                       HttpServletResponse response
@@ -131,10 +130,14 @@ public class TestHttpServer extends Http
   }
 
   @BeforeClass public static void setup() throws Exception {
-    server = createTestServer();
+    Configuration conf = new Configuration();
+    conf.setInt(HttpServer.HTTP_MAX_THREADS, 10);
+    server = createTestServer(conf);
     server.addServlet("echo", "/echo", EchoServlet.class);
     server.addServlet("echomap", "/echomap", EchoMapServlet.class);
     server.addServlet("htmlcontent", "/htmlcontent", HtmlContentServlet.class);
+    server.addJerseyResourcePackage(
+        JerseyResource.class.getPackage().getName(), "/jersey/*");
     server.start();
     baseUrl = getServerURL(server);
     LOG.info("HTTP server started: "+ baseUrl);
@@ -161,7 +164,8 @@ public class TestHttpServer extends Http
             assertEquals("a:b\nc:d\n",
                          readOutput(new URL(baseUrl, "/echo?a=b&c=d")));
             int serverThreads = server.webServer.getThreadPool().getThreads();
-            assertTrue(serverThreads <= MAX_THREADS);
+            assertTrue("More threads started than expected; server thread count: "
+                    + serverThreads, serverThreads <= MAX_THREADS);
             System.out.println("Number of threads = " + serverThreads +
                 " which is less than or equal to the max = " + MAX_THREADS);
           } catch (Exception e) {
@@ -404,4 +408,18 @@ public class TestHttpServer extends Http
         values, parameterValues));
   }
 
+  @SuppressWarnings("unchecked")
+  private static Map<String, Object> parse(String jsonString) {
+    return (Map<String, Object>)JSON.parse(jsonString);
+  }
+
+  @Test public void testJersey() throws Exception {
+    LOG.info("BEGIN testJersey()");
+    final String js = readOutput(new URL(baseUrl, "/jersey/foo?op=bar"));
+    final Map<String, Object> m = parse(js);
+    LOG.info("m=" + m);
+    assertEquals("foo", m.get(JerseyResource.PATH));
+    assertEquals("bar", m.get(JerseyResource.OP));
+    LOG.info("END testJersey()");
+  }
 }

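The new testJersey() registers JerseyResource's package on the embedded server and then asserts that GET /jersey/foo?op=bar comes back as JSON echoing the path component and the op query parameter. The JerseyResource class itself is outside this excerpt; a sketch of a JAX-RS resource that would satisfy those assertions (annotations, key names, and the JSON encoding are assumptions, not the committed class):

    import java.util.Map;
    import java.util.TreeMap;
    import javax.ws.rs.GET;
    import javax.ws.rs.Path;
    import javax.ws.rs.PathParam;
    import javax.ws.rs.Produces;
    import javax.ws.rs.QueryParam;
    import javax.ws.rs.core.MediaType;
    import org.mortbay.util.ajax.JSON;

    @Path("/{path}")
    public class JerseyResourceSketch {
      public static final String PATH = "path";
      public static final String OP = "op";

      @GET
      @Produces(MediaType.APPLICATION_JSON)
      public String get(@PathParam(PATH) String path,
                        @QueryParam(OP) String op) {
        // Echo the request back, e.g. {"op":"bar","path":"foo"}
        Map<String, Object> m = new TreeMap<String, Object>();
        m.put(PATH, path);
        m.put(OP, op);
        return JSON.toString(m);
      }
    }
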
Propchange: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSequenceFile.java
            ('svn:mergeinfo' removed)

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java?rev=1166495&r1=1166494&r2=1166495&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java Thu Sep  8 01:39:07 2011
@@ -50,7 +50,7 @@ public class TestNativeIO {
   }
 
   @Before
-  public void setupTestDir() throws IOException {
+  public void setupTestDir() {
     FileUtil.fullyDelete(TEST_DIR);
     TEST_DIR.mkdirs();
   }

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPC.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPC.java?rev=1166495&r1=1166494&r2=1166495&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPC.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPC.java Thu Sep  8 01:39:07 2011
@@ -97,7 +97,7 @@ public class TestIPC {
     }
 
     @Override
-    public Writable call(Class<?> protocol, Writable param, long receiveTime)
+    public Writable call(String protocol, Writable param, long receiveTime)
         throws IOException {
       if (sleep) {
         // sleep a bit

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPCServerResponder.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPCServerResponder.java?rev=1166495&r1=1166494&r2=1166495&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPCServerResponder.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPCServerResponder.java Thu Sep  8 01:39:07 2011
@@ -72,7 +72,7 @@ public class TestIPCServerResponder exte
     }
 
     @Override
-    public Writable call(Class<?> protocol, Writable param, long receiveTime)
+    public Writable call(String protocol, Writable param, long receiveTime)
         throws IOException {
       if (sleep) {
         try {

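Both TestIPC and TestIPCServerResponder adapt to the same Server API change: call() now receives the protocol as a String name rather than a loaded Class object, so the server no longer has to resolve client-supplied class names before dispatch. In sketch form (scaffolding elided; this mirrors the hunks above, not the full org.apache.hadoop.ipc.Server API):

    import java.io.IOException;
    import org.apache.hadoop.io.Writable;

    class EchoServerSketch {
      // Before: call(Class<?> protocol, ...). After: a plain protocol name.
      public Writable call(String protocol, Writable param, long receiveTime)
          throws IOException {
        return param;  // echo the parameter back, as both test servers do
      }
    }
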
Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPCCompatibility.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPCCompatibility.java?rev=1166495&r1=1166494&r2=1166495&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPCCompatibility.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPCCompatibility.java Thu Sep  8 01:39:07 2011
@@ -39,7 +39,7 @@ import org.junit.Test;
 public class TestRPCCompatibility {
   private static final String ADDRESS = "0.0.0.0";
   private static InetSocketAddress addr;
-  private static Server server;
+  private static RPC.Server server;
   private ProtocolProxy<?> proxy;
 
   public static final Log LOG =
@@ -52,10 +52,12 @@ public class TestRPCCompatibility {
     void ping() throws IOException;    
   }
   
-  public interface TestProtocol1 extends TestProtocol0 {
+  public interface TestProtocol1 extends VersionedProtocol, TestProtocol0 {
     String echo(String value) throws IOException;
   }
 
+  @ProtocolInfo(protocolName=
+      "org.apache.hadoop.ipc.TestRPCCompatibility$TestProtocol1")
   public interface TestProtocol2 extends TestProtocol1 {
     int echo(int value)  throws IOException;
   }
@@ -89,11 +91,23 @@ public class TestRPCCompatibility {
   public static class TestImpl1 extends TestImpl0 implements TestProtocol1 {
     @Override
     public String echo(String value) { return value; }
+    @Override
+    public long getProtocolVersion(String protocol,
+        long clientVersion) throws IOException {
+        return TestProtocol1.versionID;
+    }
   }
 
   public static class TestImpl2 extends TestImpl1 implements TestProtocol2 {
     @Override
     public int echo(int value) { return value; }
+
+    @Override
+    public long getProtocolVersion(String protocol,
+        long clientVersion) throws IOException {
+      return TestProtocol2.versionID;
+    }
+
   }
   
   @After
@@ -109,8 +123,10 @@ public class TestRPCCompatibility {
   @Test  // old client vs new server
   public void testVersion0ClientVersion1Server() throws Exception {
     // create a server with two handlers
+    TestImpl1 impl = new TestImpl1();
     server = RPC.getServer(TestProtocol1.class,
-                              new TestImpl1(), ADDRESS, 0, 2, false, conf, null);
+                            impl, ADDRESS, 0, 2, false, conf, null);
+    server.addProtocol(TestProtocol0.class, impl);
     server.start();
     addr = NetUtils.getConnectAddress(server);
 
@@ -172,8 +188,10 @@ public class TestRPCCompatibility {
   @Test // Compatible new client & old server
   public void testVersion2ClientVersion1Server() throws Exception {
     // create a server with two handlers
+    TestImpl1 impl = new TestImpl1();
     server = RPC.getServer(TestProtocol1.class,
-                              new TestImpl1(), ADDRESS, 0, 2, false, conf, null);
+                              impl, ADDRESS, 0, 2, false, conf, null);
+    server.addProtocol(TestProtocol0.class, impl);
     server.start();
     addr = NetUtils.getConnectAddress(server);
 
@@ -190,8 +208,10 @@ public class TestRPCCompatibility {
   @Test // equal version client and server
   public void testVersion2ClientVersion2Server() throws Exception {
     // create a server with two handlers
+    TestImpl2 impl = new TestImpl2();
     server = RPC.getServer(TestProtocol2.class,
-                              new TestImpl2(), ADDRESS, 0, 2, false, conf, null);
+                             impl, ADDRESS, 0, 2, false, conf, null);
+    server.addProtocol(TestProtocol0.class, impl);
     server.start();
     addr = NetUtils.getConnectAddress(server);
 
@@ -250,14 +270,16 @@ public class TestRPCCompatibility {
     assertEquals(hash1, hash2);
   }
   
+  @ProtocolInfo(protocolName=
+      "org.apache.hadoop.ipc.TestRPCCompatibility$TestProtocol1")
   public interface TestProtocol4 extends TestProtocol2 {
-    public static final long versionID = 1L;
+    public static final long versionID = 4L;
     int echo(int value)  throws IOException;
   }
   
   @Test
   public void testVersionMismatch() throws IOException {
-    server = RPC.getServer(TestProtocol2.class, new TestImpl0(), ADDRESS, 0, 2,
+    server = RPC.getServer(TestProtocol2.class, new TestImpl2(), ADDRESS, 0, 2,
         false, conf, null);
     server.start();
     addr = NetUtils.getConnectAddress(server);
@@ -268,7 +290,8 @@ public class TestRPCCompatibility {
       proxy.echo(21);
       fail("The call must throw VersionMismatch exception");
     } catch (IOException ex) {
-      Assert.assertTrue(ex.getMessage().contains("VersionMismatch"));
+      Assert.assertTrue("Expected version mismatch but got " + ex.getMessage(), 
+          ex.getMessage().contains("VersionMismatch"));
     }
   }
 }
\ No newline at end of file

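Taken together, the TestRPCCompatibility changes exercise the multi-protocol server introduced on this branch: one implementation object is registered under its primary protocol and then additionally under the older interface via addProtocol(), while @ProtocolInfo lets a newer interface keep an older protocol's wire name. Condensed from the hunks above (conf, ADDRESS, and the Test* types are as declared in the test class):

    TestImpl1 impl = new TestImpl1();
    RPC.Server server = RPC.getServer(TestProtocol1.class, impl,
        ADDRESS, 0, 2, false, conf, null);
    // Serve the same instance under the older protocol too, so a
    // version-0 client can still reach it:
    server.addProtocol(TestProtocol0.class, impl);
    server.start();
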
Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestGangliaMetrics.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestGangliaMetrics.java?rev=1166495&r1=1166494&r2=1166495&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestGangliaMetrics.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestGangliaMetrics.java Thu Sep  8 01:39:07 2011
@@ -26,12 +26,17 @@ import java.net.DatagramPacket;
 import java.net.DatagramSocket;
 import java.net.SocketException;
 import java.util.ArrayList;
+import java.util.HashSet;
 import java.util.List;
+import java.util.Set;
 import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.TimeUnit;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.metrics2.AbstractMetric;
+import org.apache.hadoop.metrics2.MetricsRecord;
+import org.apache.hadoop.metrics2.MetricsTag;
 import org.apache.hadoop.metrics2.annotation.Metric;
 import org.apache.hadoop.metrics2.annotation.Metrics;
 import org.apache.hadoop.metrics2.lib.MetricsRegistry;
@@ -54,6 +59,44 @@ public class TestGangliaMetrics {
       "test.s1rec.S1NumOps",
       "test.s1rec.S1AvgTime" };
 
+  @Test
+  public void testTagsForPrefix() throws Exception {
+    ConfigBuilder cb = new ConfigBuilder()
+      .add("test.sink.ganglia.tagsForPrefix.all", "*")
+      .add("test.sink.ganglia.tagsForPrefix.some", "NumActiveSinks, NumActiveSources")
+      .add("test.sink.ganglia.tagsForPrefix.none", "");
+    GangliaSink30 sink = new GangliaSink30();
+    sink.init(cb.subset("test.sink.ganglia"));
+
+    List<MetricsTag> tags = new ArrayList<MetricsTag>();
+    tags.add(new MetricsTag(MsInfo.Context, "all"));
+    tags.add(new MetricsTag(MsInfo.NumActiveSources, "foo"));
+    tags.add(new MetricsTag(MsInfo.NumActiveSinks, "bar"));
+    tags.add(new MetricsTag(MsInfo.NumAllSinks, "haa"));
+    tags.add(new MetricsTag(MsInfo.Hostname, "host"));
+    Set<AbstractMetric> metrics = new HashSet<AbstractMetric>();
+    MetricsRecord record = new MetricsRecordImpl(MsInfo.Context, (long) 1, tags, metrics);
+
+    StringBuilder sb = new StringBuilder();
+    sink.appendPrefix(record, sb);
+    assertEquals(".NumActiveSources=foo.NumActiveSinks=bar.NumAllSinks=haa", sb.toString());
+
+    tags.set(0, new MetricsTag(MsInfo.Context, "some"));
+    sb = new StringBuilder();
+    sink.appendPrefix(record, sb);
+    assertEquals(".NumActiveSources=foo.NumActiveSinks=bar", sb.toString());
+
+    tags.set(0, new MetricsTag(MsInfo.Context, "none"));
+    sb = new StringBuilder();
+    sink.appendPrefix(record, sb);
+    assertEquals("", sb.toString());
+
+    tags.set(0, new MetricsTag(MsInfo.Context, "nada"));
+    sb = new StringBuilder();
+    sink.appendPrefix(record, sb);
+    assertEquals("", sb.toString());
+  }
+  
   @Test public void testGangliaMetrics2() throws Exception {
     ConfigBuilder cb = new ConfigBuilder().add("default.period", 10)
         .add("test.sink.gsink30.context", "test") // filter out only "test"

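The new testTagsForPrefix() pins down how GangliaSink30 chooses which tags to fold into a metric name prefix: "*" keeps every tag, a comma-separated list keeps only the named tags, and an empty or absent entry keeps none. Outside a test, the same keys would live in the metrics2 properties configuration rather than a ConfigBuilder; a hedged sketch (the "test" prefix and context names mirror the test, a deployment would use its own sink prefix):

    # keep every tag for records in the "all" context
    test.sink.ganglia.tagsForPrefix.all=*
    # keep only the two named tags for the "some" context
    test.sink.ganglia.tagsForPrefix.some=NumActiveSinks,NumActiveSources
    # keep no tags for the "none" context (unlisted contexts behave the same)
    test.sink.ganglia.tagsForPrefix.none=
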
Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestAuthenticationFilter.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestAuthenticationFilter.java?rev=1166495&r1=1166494&r2=1166495&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestAuthenticationFilter.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestAuthenticationFilter.java Thu Sep  8 01:39:07 2011
@@ -18,7 +18,7 @@ package org.apache.hadoop.security;
 
 
 import junit.framework.TestCase;
-import org.apache.hadoop.alfredo.server.AuthenticationFilter;
+import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.http.FilterContainer;
 import org.mockito.Mockito;

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestRunJar.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestRunJar.java?rev=1166495&r1=1166494&r2=1166495&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestRunJar.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestRunJar.java Thu Sep  8 01:39:07 2011
@@ -49,8 +49,7 @@ public class TestRunJar extends TestCase
   }
 
   @After
-  protected void tearDown()
-      throws Exception {
+  protected void tearDown() {
     FileUtil.fullyDelete(TEST_ROOT_DIR);
   }
 

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/pom.xml?rev=1166495&r1=1166494&r2=1166495&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/pom.xml (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/pom.xml Thu Sep  8 01:39:07 2011
@@ -29,6 +29,7 @@
 
   <modules>
     <module>hadoop-auth</module>
+    <module>hadoop-auth-examples</module>
     <module>hadoop-common</module>
     <module>hadoop-annotations</module>
   </modules>