Posted to common-commits@hadoop.apache.org by cn...@apache.org on 2013/06/21 08:37:39 UTC

svn commit: r1495297 [11/46] - in /hadoop/common/branches/branch-1-win: ./ bin/ conf/ ivy/ lib/jdiff/ src/c++/libhdfs/docs/ src/c++/libhdfs/tests/conf/ src/contrib/capacity-scheduler/ivy/ src/contrib/capacity-scheduler/src/java/org/apache/hadoop/mapred...

Modified: hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/conf/Configuration.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/conf/Configuration.java?rev=1495297&r1=1495296&r2=1495297&view=diff
==============================================================================
--- hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/conf/Configuration.java (original)
+++ hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/conf/Configuration.java Fri Jun 21 06:37:27 2013
@@ -27,6 +27,7 @@ import java.io.IOException;
 import java.io.InputStream;
 import java.io.InputStreamReader;
 import java.io.OutputStream;
+import java.io.OutputStreamWriter;
 import java.io.Reader;
 import java.io.Writer;
 import java.net.URL;
@@ -51,6 +52,7 @@ import javax.xml.parsers.DocumentBuilder
 import javax.xml.parsers.DocumentBuilderFactory;
 import javax.xml.parsers.ParserConfigurationException;
 import javax.xml.transform.Transformer;
+import javax.xml.transform.TransformerException;
 import javax.xml.transform.TransformerFactory;
 import javax.xml.transform.dom.DOMSource;
 import javax.xml.transform.stream.StreamResult;
@@ -65,6 +67,7 @@ import org.apache.hadoop.util.Reflection
 import org.apache.hadoop.util.StringUtils;
 import org.codehaus.jackson.JsonFactory;
 import org.codehaus.jackson.JsonGenerator;
+import org.w3c.dom.Comment;
 import org.w3c.dom.DOMException;
 import org.w3c.dom.Document;
 import org.w3c.dom.Element;
@@ -171,10 +174,10 @@ public class Configuration implements It
     new CopyOnWriteArrayList<String>();
   
   /**
-   * Flag to indicate if the storage of resource which updates a key needs 
-   * to be stored for each key
+   * The value reported as the setting resource when a key is set
+   * by code rather than a file resource.
    */
-  private boolean storeResource;
+  static final String UNKNOWN_RESOURCE = "Unknown";
   
   /**
    * Stores the mapping of key to the resource which modifies or loads 
@@ -223,30 +226,10 @@ public class Configuration implements It
    */
   public Configuration(boolean loadDefaults) {
     this.loadDefaults = loadDefaults;
-    if (LOG.isDebugEnabled()) {
-      LOG.debug(StringUtils.stringifyException(new IOException("config()")));
-    }
+    updatingResource = new HashMap<String, String>();
     synchronized(Configuration.class) {
       REGISTRY.put(this, null);
     }
-    this.storeResource = false;
-  }
-  
-  /**
-   * A new configuration with the same settings and additional facility for
-   * storage of resource to each key which loads or updates 
-   * the key most recently
-   * @param other the configuration from which to clone settings
-   * @param storeResource flag to indicate if the storage of resource to 
-   * each key is to be stored
-   */
-  private Configuration(Configuration other, boolean storeResource) {
-    this(other);
-    this.loadDefaults = other.loadDefaults;
-    this.storeResource = storeResource;
-    if (storeResource) {
-      updatingResource = new HashMap<String, String>();
-    }
   }
   
   /** 
@@ -256,24 +239,22 @@ public class Configuration implements It
    */
   @SuppressWarnings("unchecked")
   public Configuration(Configuration other) {
-    if (LOG.isDebugEnabled()) {
-      LOG.debug(StringUtils.stringifyException
-                (new IOException("config(config)")));
-    }
-   
-   this.resources = (ArrayList)other.resources.clone();
-   synchronized(other) {
-     if (other.properties != null) {
-       this.properties = (Properties)other.properties.clone();
-     }
-
-     if (other.overlay!=null) {
-       this.overlay = (Properties)other.overlay.clone();
-     }
-   }
-   
+    this.resources = (ArrayList) other.resources.clone();
+    synchronized (other) {
+      if (other.properties != null) {
+        this.properties = (Properties) other.properties.clone();
+      }
+
+      if (other.overlay != null) {
+        this.overlay = (Properties) other.overlay.clone();
+      }
+
+      this.updatingResource = new HashMap<String, String>(
+          other.updatingResource);
+    }
+
     this.finalParameters = new HashSet<String>(other.finalParameters);
-    synchronized(Configuration.class) {
+    synchronized (Configuration.class) {
       REGISTRY.put(this, null);
     }
   }
@@ -437,8 +418,17 @@ public class Configuration implements It
   public void set(String name, String value) {
     getOverlay().setProperty(name, value);
     getProps().setProperty(name, value);
+    this.updatingResource.put(name, UNKNOWN_RESOURCE);
   }
-  
+ 
+  /**
+   * Unset a previously set property.
+   */
+  public synchronized void unset(String name) {
+    getOverlay().remove(name);
+    getProps().remove(name);
+  }
+ 
   /**
    * Sets a property if it is currently unset.
    * @param name the property name
@@ -1063,10 +1053,8 @@ public class Configuration implements It
       loadResources(properties, resources, quietmode);
       if (overlay!= null) {
         properties.putAll(overlay);
-        if (storeResource) {
-          for (Map.Entry<Object,Object> item: overlay.entrySet()) {
-            updatingResource.put((String) item.getKey(), "Unknown");
-          }
+        for (Map.Entry<Object,Object> item: overlay.entrySet()) {
+          updatingResource.put((String) item.getKey(), UNKNOWN_RESOURCE);
         }
       }
     }
@@ -1250,9 +1238,7 @@ public class Configuration implements It
           if (value != null) {
             if (!finalParameters.contains(attr)) {
               properties.setProperty(attr, value);
-              if (storeResource) {
-                updatingResource.put(attr, name.toString());
-              }
+              updatingResource.put(attr, name.toString());
             } else if (!value.equals(properties.getProperty(attr))) {
              LOG.warn(name+": an attempt to override final parameter: "+attr
                      +";  Ignoring.");
@@ -1280,51 +1266,81 @@ public class Configuration implements It
   }
 
   /** 
-   * Write out the non-default properties in this configuration to the give
+   * Write out the non-default properties in this configuration to the given
    * {@link OutputStream}.
    * 
    * @param out the output stream to write to.
    */
   public void writeXml(OutputStream out) throws IOException {
-    Properties properties = getProps();
+    writeXml(new OutputStreamWriter(out));
+  }
+  
+  /**
+   * Write out the non-default properties in this configuration to the given
+   * {@link Writer}.
+   * 
+   * @param out the writer to write to.
+   */
+  public void writeXml(Writer out) throws IOException {
+    Document doc = asXmlDocument();
     try {
-      Document doc =
-        DocumentBuilderFactory.newInstance().newDocumentBuilder().newDocument();
-      Element conf = doc.createElement("configuration");
-      doc.appendChild(conf);
-      conf.appendChild(doc.createTextNode("\n"));
-      for (Enumeration e = properties.keys(); e.hasMoreElements();) {
-        String name = (String)e.nextElement();
-        Object object = properties.get(name);
-        String value = null;
-        if (object instanceof String) {
-          value = (String) object;
-        }else {
-          continue;
-        }
-        Element propNode = doc.createElement("property");
-        conf.appendChild(propNode);
-      
-        Element nameNode = doc.createElement("name");
-        nameNode.appendChild(doc.createTextNode(name));
-        propNode.appendChild(nameNode);
-      
-        Element valueNode = doc.createElement("value");
-        valueNode.appendChild(doc.createTextNode(value));
-        propNode.appendChild(valueNode);
-
-        conf.appendChild(doc.createTextNode("\n"));
-      }
-    
       DOMSource source = new DOMSource(doc);
       StreamResult result = new StreamResult(out);
       TransformerFactory transFactory = TransformerFactory.newInstance();
       Transformer transformer = transFactory.newTransformer();
+      
+      // Important to not hold the Configuration lock while writing the result, since
+      // 'out' may be an HDFS stream which needs to lock this configuration
+      // from another thread.
       transformer.transform(source, result);
-    } catch (Exception e) {
-      throw new RuntimeException(e);
+    } catch (TransformerException te) {
+      throw new IOException(te);
     }
   }
+  
+  /**
+   * Return the XML DOM corresponding to this Configuration.
+   */
+  private synchronized Document asXmlDocument() throws IOException {
+    Document doc;
+    Properties properties = getProps();
+    try {
+      doc =
+        DocumentBuilderFactory.newInstance().newDocumentBuilder().newDocument();
+    } catch (ParserConfigurationException pe) {
+      throw new IOException(pe);
+    }
+    Element conf = doc.createElement("configuration");
+    doc.appendChild(conf);
+    conf.appendChild(doc.createTextNode("\n"));
+    for (Enumeration<Object> e = properties.keys(); e.hasMoreElements();) {
+      String name = (String) e.nextElement();
+      Object object = properties.get(name);
+      String value = null;
+      if (object instanceof String) {
+        value = (String) object;
+      } else {
+        continue;
+      }
+      Element propNode = doc.createElement("property");
+      conf.appendChild(propNode);
+      if (updatingResource != null) {
+        Comment commentNode = doc.createComment("Loaded from "
+            + updatingResource.get(name));
+        propNode.appendChild(commentNode);
+      }
+      Element nameNode = doc.createElement("name");
+      nameNode.appendChild(doc.createTextNode(name));
+      propNode.appendChild(nameNode);
+    
+      Element valueNode = doc.createElement("value");
+      valueNode.appendChild(doc.createTextNode(value));
+      propNode.appendChild(valueNode);
+
+      conf.appendChild(doc.createTextNode("\n"));
+    }
+    return doc;
+  }
 
   /**
    *  Writes out all the parameters and their properties (final and resource) to
@@ -1337,26 +1353,26 @@ public class Configuration implements It
    * @param out the Writer to write to
    * @throws IOException
    */
-  public static void dumpConfiguration(Configuration conf, 
+  public static void dumpConfiguration(Configuration config, 
       Writer out) throws IOException {
-    Configuration config = new Configuration(conf,true);
-    config.reloadConfiguration();
     JsonFactory dumpFactory = new JsonFactory();
     JsonGenerator dumpGenerator = dumpFactory.createJsonGenerator(out);
     dumpGenerator.writeStartObject();
     dumpGenerator.writeFieldName("properties");
     dumpGenerator.writeStartArray();
     dumpGenerator.flush();
-    for (Map.Entry<Object,Object> item: config.getProps().entrySet()) {
-      dumpGenerator.writeStartObject();
-      dumpGenerator.writeStringField("key", (String) item.getKey());
-      dumpGenerator.writeStringField("value", 
-          config.get((String) item.getKey()));
-      dumpGenerator.writeBooleanField("isFinal",
-          config.finalParameters.contains(item.getKey()));
-      dumpGenerator.writeStringField("resource",
-          config.updatingResource.get(item.getKey()));
-      dumpGenerator.writeEndObject();
+    synchronized (config) {
+      for (Map.Entry<Object,Object> item: config.getProps().entrySet()) {
+        dumpGenerator.writeStartObject();
+        dumpGenerator.writeStringField("key", (String) item.getKey());
+        dumpGenerator.writeStringField("value", 
+            config.get((String) item.getKey()));
+        dumpGenerator.writeBooleanField("isFinal",
+            config.finalParameters.contains(item.getKey()));
+        dumpGenerator.writeStringField("resource",
+            config.updatingResource.get(item.getKey()));
+        dumpGenerator.writeEndObject();
+      }
     }
     dumpGenerator.writeEndArray();
     dumpGenerator.writeEndObject();
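
A minimal sketch of how the new unset() and writeXml(Writer) methods in this
hunk might be exercised; the class and property names below are illustrative,
not part of the patch:

    import java.io.IOException;
    import java.io.StringWriter;

    import org.apache.hadoop.conf.Configuration;

    public class ConfigurationXmlDemo {
      public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration(false);
        conf.set("demo.key", "demo-value");  // recorded against the "Unknown" resource
        conf.set("demo.temp", "scratch");
        conf.unset("demo.temp");             // new in this patch: drops the override

        // New overload: write directly to a Writer instead of an OutputStream.
        StringWriter out = new StringWriter();
        conf.writeXml(out);
        // Each <property> now carries a "Loaded from ..." comment.
        System.out.println(out.toString());
      }
    }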

Modified: hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/fs/BufferedFSInputStream.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/fs/BufferedFSInputStream.java?rev=1495297&r1=1495296&r2=1495297&view=diff
==============================================================================
--- hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/fs/BufferedFSInputStream.java (original)
+++ hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/fs/BufferedFSInputStream.java Fri Jun 21 06:37:27 2013
@@ -18,6 +18,8 @@
 package org.apache.hadoop.fs;
 
 import java.io.BufferedInputStream;
+import java.io.FileDescriptor;
+import java.io.FileInputStream;
 import java.io.IOException;
 
 
@@ -27,7 +29,7 @@ import java.io.IOException;
 
 
 public class BufferedFSInputStream extends BufferedInputStream
-implements Seekable, PositionedReadable {
+implements Seekable, PositionedReadable, HasFileDescriptor {
   /**
    * Creates a <code>BufferedFSInputStream</code>
    * with the specified buffer size,
@@ -93,4 +95,13 @@ implements Seekable, PositionedReadable 
   public void readFully(long position, byte[] buffer) throws IOException {
     ((FSInputStream)in).readFully(position, buffer);
   }
+
+  @Override
+  public FileDescriptor getFileDescriptor() throws IOException {
+    if (in instanceof HasFileDescriptor) {
+      return ((HasFileDescriptor) in).getFileDescriptor();
+    } else {
+      return null;
+    }
+  }
 }

Modified: hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/fs/CommonConfigurationKeys.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/fs/CommonConfigurationKeys.java?rev=1495297&r1=1495296&r2=1495297&view=diff
==============================================================================
--- hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/fs/CommonConfigurationKeys.java (original)
+++ hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/fs/CommonConfigurationKeys.java Fri Jun 21 06:37:27 2013
@@ -40,6 +40,9 @@ public class CommonConfigurationKeys {
   public static final String HADOOP_SECURITY_AUTHORIZATION =
     "hadoop.security.authorization";
   /** See src/core/core-default.xml */
+  public static final String HADOOP_SECURITY_INSTRUMENTATION_REQUIRES_ADMIN = 
+      "hadoop.security.instrumentation.requires.admin";
+  /** See src/core/core-default.xml */
   public static final String  HADOOP_SECURITY_SERVICE_USER_NAME_KEY = 
     "hadoop.security.service.user.name.key";
   /** See src/core/core-default.xml */
@@ -47,6 +50,10 @@ public class CommonConfigurationKeys {
     "hadoop.security.token.service.use_ip";
   public static final boolean HADOOP_SECURITY_TOKEN_SERVICE_USE_IP_DEFAULT =
       true;
+  public static final String HADOOP_SECURITY_USE_WEAK_HTTP_CRYPTO_KEY =
+      "hadoop.security.use-weak-http-crypto";
+  public static final boolean HADOOP_SECURITY_USE_WEAK_HTTP_CRYPTO_DEFAULT =
+      false;
   
   public static final String IPC_SERVER_RPC_READ_THREADS_KEY =
                                         "ipc.server.read.threadpool.size";
@@ -64,5 +71,21 @@ public class CommonConfigurationKeys {
   /** Default value for IO_COMPRESSION_CODEC_SNAPPY_BUFFERSIZE_KEY */
   public static final int IO_COMPRESSION_CODEC_SNAPPY_BUFFERSIZE_DEFAULT =
       256 * 1024;
+
+  /** See src/core/core-default.xml */
+  public static final String HADOOP_RELAXED_VERSION_CHECK_KEY =
+      "hadoop.relaxed.worker.version.check";
+  public static final boolean HADOOP_RELAXED_VERSION_CHECK_DEFAULT = false;
+
+  /** See src/core/core-default.xml */
+  public static final String HADOOP_SKIP_VERSION_CHECK_KEY =
+      "hadoop.skip.worker.version.check";
+  public static final boolean HADOOP_SKIP_VERSION_CHECK_DEFAULT = false;
+
+  /** Enable/Disable aliases serving from jetty */
+  public static final String HADOOP_JETTY_LOGS_SERVE_ALIASES =
+    "hadoop.jetty.logs.serve.aliases";
+  public static final boolean DEFAULT_HADOOP_JETTY_LOGS_SERVE_ALIASES =
+    true;
 }
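
For reference, a short sketch of reading one of the keys added above through
the standard Configuration accessor; the demo class is hypothetical:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.CommonConfigurationKeys;

    public class KeyDemo {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        // Defaults to true per DEFAULT_HADOOP_JETTY_LOGS_SERVE_ALIASES above.
        boolean serveAliases = conf.getBoolean(
            CommonConfigurationKeys.HADOOP_JETTY_LOGS_SERVE_ALIASES,
            CommonConfigurationKeys.DEFAULT_HADOOP_JETTY_LOGS_SERVE_ALIASES);
        System.out.println("serve jetty log aliases: " + serveAliases);
      }
    }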
 

Modified: hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/fs/FSDataInputStream.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/fs/FSDataInputStream.java?rev=1495297&r1=1495296&r2=1495297&view=diff
==============================================================================
--- hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/fs/FSDataInputStream.java (original)
+++ hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/fs/FSDataInputStream.java Fri Jun 21 06:37:27 2013
@@ -22,7 +22,7 @@ import java.io.*;
 /** Utility that wraps a {@link FSInputStream} in a {@link DataInputStream}
  * and buffers input through a {@link BufferedInputStream}. */
 public class FSDataInputStream extends DataInputStream
-    implements Seekable, PositionedReadable, Closeable {
+    implements Seekable, PositionedReadable, Closeable, HasFileDescriptor {
 
   public FSDataInputStream(InputStream in)
     throws IOException {
@@ -59,4 +59,15 @@ public class FSDataInputStream extends D
   public boolean seekToNewSource(long targetPos) throws IOException {
     return ((Seekable)in).seekToNewSource(targetPos); 
   }
+
+  @Override
+  public FileDescriptor getFileDescriptor() throws IOException {
+    if (in instanceof HasFileDescriptor) {
+      return ((HasFileDescriptor) in).getFileDescriptor();
+    } else if (in instanceof FileInputStream) {
+      return ((FileInputStream) in).getFD();
+    } else {
+      return null;
+    }
+  }
 }

Modified: hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/fs/FSDataOutputStream.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/fs/FSDataOutputStream.java?rev=1495297&r1=1495296&r2=1495297&view=diff
==============================================================================
--- hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/fs/FSDataOutputStream.java (original)
+++ hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/fs/FSDataOutputStream.java Fri Jun 21 06:37:27 2013
@@ -95,6 +95,8 @@ public class FSDataOutputStream extends 
   public void sync() throws IOException {
     if (wrappedStream instanceof Syncable) {
       ((Syncable)wrappedStream).sync();
+    } else {
+      wrappedStream.flush();
     }
   }
 }

Modified: hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/fs/FSInputChecker.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/fs/FSInputChecker.java?rev=1495297&r1=1495296&r2=1495297&view=diff
==============================================================================
--- hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/fs/FSInputChecker.java (original)
+++ hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/fs/FSInputChecker.java Fri Jun 21 06:37:27 2013
@@ -295,12 +295,12 @@ abstract public class FSInputChecker ext
   
   @Override
   public synchronized long getPos() throws IOException {
-    return chunkPos-(count-pos);
+    return chunkPos-Math.max(0L, count - pos);
   }
 
   @Override
   public synchronized int available() throws IOException {
-    return count-pos;
+    return Math.max(0, count - pos);
   }
   
   /**

Modified: hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/fs/FileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/fs/FileSystem.java?rev=1495297&r1=1495296&r2=1495297&view=diff
==============================================================================
--- hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/fs/FileSystem.java (original)
+++ hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/fs/FileSystem.java Fri Jun 21 06:37:27 2013
@@ -234,11 +234,11 @@ public abstract class FileSystem extends
     String scheme = uri.getScheme();
     String authority = uri.getAuthority();
 
-    if (scheme == null) {                       // no scheme: use default FS
+    if (scheme == null && authority == null) {     // use default FS
       return get(conf);
     }
 
-    if (authority == null) {                       // no authority
+    if (scheme != null && authority == null) {     // no authority
       URI defaultUri = getDefaultUri(conf);
       if (scheme.equals(defaultUri.getScheme())    // if scheme matches default
           && defaultUri.getAuthority() != null) {  // & default has authority
@@ -402,7 +402,7 @@ public abstract class FileSystem extends
       throw new IllegalArgumentException("Invalid start or len parameter");
     }
 
-    if (file.getLen() < start) {
+    if (file.getLen() <= start) {
       return new BlockLocation[0];
 
     }
@@ -669,6 +669,17 @@ public abstract class FileSystem extends
    */
   public abstract FSDataOutputStream append(Path f, int bufferSize,
       Progressable progress) throws IOException;
+
+  /**
+   * Concat existing files together.
+   * @param trg the path to the target destination.
+   * @param srcs the paths to the sources to use for the concatenation.
+   * @throws IOException
+   */
+  public void concat(final Path trg, final Path [] srcs) throws IOException {
+    throw new UnsupportedOperationException("Not implemented by the " + 
+        getClass().getSimpleName() + " FileSystem implementation");
+  }
   
   /**
    * Get replication.
@@ -1032,11 +1043,12 @@ public abstract class FileSystem extends
         results = listStatus(parentPaths, fp);
         hasGlob[0] = true;
       } else { // last component does not have a pattern
+        // remove the quoting of metachars in a non-regexp expansion
+        String name = unquotePathComponent(components[components.length - 1]);
         // get all the path names
         ArrayList<Path> filteredPaths = new ArrayList<Path>(parentPaths.length);
         for (int i = 0; i < parentPaths.length; i++) {
-          parentPaths[i] = new Path(parentPaths[i],
-            components[components.length - 1]);
+          parentPaths[i] = new Path(parentPaths[i], name);
           if (fp.accept(parentPaths[i])) {
             filteredPaths.add(parentPaths[i]);
           }
@@ -1079,13 +1091,27 @@ public abstract class FileSystem extends
     if (fp.hasPattern()) {
       parents = FileUtil.stat2Paths(listStatus(parents, fp));
       hasGlob[0] = true;
-    } else {
+    } else { // the component does not have a pattern
+      // remove the quoting of metachars in a non-regexp expansion
+      String name = unquotePathComponent(filePattern[level]);
       for (int i = 0; i < parents.length; i++) {
-        parents[i] = new Path(parents[i], filePattern[level]);
+        parents[i] = new Path(parents[i], name);
       }
     }
     return globPathsLevel(parents, filePattern, level + 1, hasGlob);
   }
+  
+  /**
+   * The glob filter builds a regexp per path component.  If the component
+   * does not contain a shell metachar, then it falls back to appending the
+   * raw string to the list of built up paths.  This raw path needs to have
+   * the quoting removed, i.e. convert all occurrences of "\X" to "X".
+   * @param name the quoted path component
+   * @return the unquoted path component
+   */
+  private String unquotePathComponent(String name) {
+    return name.replaceAll("\\\\(.)", "$1");
+  }
     
   /** Return the current user's home directory in this filesystem.
    * The default implementation returns "/user/$USER/".
@@ -1275,19 +1301,44 @@ public abstract class FileSystem extends
     return getFileStatus(f).getBlockSize();
   }
     
-  /** Return the number of bytes that large input files should be optimally
-   * be split into to minimize i/o time. */
+  /**
+   * Return the number of bytes that large input files should optimally
+   * be split into to minimize i/o time.
+   * @deprecated use {@link #getDefaultBlockSize(Path)} instead
+   */
+  @Deprecated
   public long getDefaultBlockSize() {
     // default to 32MB: large enough to minimize the impact of seeks
     return getConf().getLong("fs.local.block.size", 32 * 1024 * 1024);
   }
+
+  /**
+   * Return the number of bytes that large input files should optimally
+   * be split into to minimize i/o time.
+   * @param f path of file
+   * @return the default block size for the path's filesystem
+   */
+  public long getDefaultBlockSize(Path f) {
+    return getDefaultBlockSize();
+  }
     
   /**
    * Get the default replication.
+   * @deprecated use {@link #getDefaultReplication(Path)} instead
    */
+  @Deprecated
   public short getDefaultReplication() { return 1; }
 
   /**
+   * Get the default replication.
+   * @param path the path of the file
+   * @return default replication for the path's filesystem 
+   */
+  public short getDefaultReplication(Path path) {
+    return getDefaultReplication();
+  }
+
+  /**
    * Return a file status object that represents the path.
    * @param f The path we want information from
    * @return a FileStatus object
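
A quick standalone illustration of the unquoting rule in unquotePathComponent
above; the helper mirrors the patch, while the demo class itself is
hypothetical:

    public class UnquoteDemo {
      // Strip one level of backslash quoting: every "\X" becomes "X".
      static String unquote(String name) {
        return name.replaceAll("\\\\(.)", "$1");
      }

      public static void main(String[] args) {
        System.out.println(unquote("a\\[b\\]c"));  // prints a[b]c
        System.out.println(unquote("plain"));      // prints plain
      }
    }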

Modified: hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/fs/FileUtil.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/fs/FileUtil.java?rev=1495297&r1=1495296&r2=1495297&view=diff
==============================================================================
--- hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/fs/FileUtil.java (original)
+++ hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/fs/FileUtil.java Fri Jun 21 06:37:27 2013
@@ -193,7 +193,7 @@ public class FileUtil {
     // Check if dest is directory
     if (!dstFS.exists(dst)) {
       throw new IOException("`" + dst +"': specified destination directory " +
-                            "doest not exist");
+                            "does not exist");
     } else {
       FileStatus sdst = dstFS.getFileStatus(dst);
       if (!sdst.isDir()) 
@@ -377,6 +377,8 @@ public class FileUtil {
       } else if (!overwrite) {
         throw new IOException("Target " + dst + " already exists");
       }
+    } else if (dst.toString().isEmpty()) {
+      return checkDest(null, dstFS, new Path(srcName), overwrite);
     }
     return dst;
   }

Modified: hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/fs/FilterFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/fs/FilterFileSystem.java?rev=1495297&r1=1495296&r2=1495297&view=diff
==============================================================================
--- hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/fs/FilterFileSystem.java (original)
+++ hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/fs/FilterFileSystem.java Fri Jun 21 06:37:27 2013
@@ -114,7 +114,11 @@ public class FilterFileSystem extends Fi
     return fs.append(f, bufferSize, progress);
   }
 
-  /** {@inheritDoc} */
+  @Override
+  public void concat(Path f, Path[] psrcs) throws IOException {
+    fs.concat(f, psrcs);
+  }
+
   @Override
   public FSDataOutputStream create(Path f, FsPermission permission,
       boolean overwrite, int bufferSize, short replication, long blockSize,
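
A hypothetical caller of the new concat() API; the paths are illustrative, and
filesystems that keep the default implementation in FileSystem throw
UnsupportedOperationException:

    import java.io.IOException;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class ConcatDemo {
      public static void main(String[] args) throws IOException {
        FileSystem fs = FileSystem.get(new Configuration());
        Path target = new Path("/data/combined");
        Path[] parts = { new Path("/data/part-0"), new Path("/data/part-1") };
        try {
          fs.concat(target, parts);
        } catch (UnsupportedOperationException e) {
          // Filesystems without native concat land here; a caller would
          // fall back to a copy-based merge.
        }
      }
    }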

Modified: hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/fs/FsShell.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/fs/FsShell.java?rev=1495297&r1=1495296&r2=1495297&view=diff
==============================================================================
--- hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/fs/FsShell.java (original)
+++ hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/fs/FsShell.java Fri Jun 21 06:37:27 2013
@@ -22,10 +22,11 @@ import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.io.InputStream;
 import java.net.URI;
-import java.text.DecimalFormat;
-import java.text.NumberFormat;
 import java.text.SimpleDateFormat;
-import java.util.*;
+import java.util.ArrayList;
+import java.util.Date;
+import java.util.List;
+import java.util.TimeZone;
 import java.util.zip.GZIPInputStream;
 
 import org.apache.hadoop.conf.Configuration;
@@ -41,14 +42,14 @@ import org.apache.hadoop.io.WritableComp
 import org.apache.hadoop.ipc.RPC;
 import org.apache.hadoop.ipc.RemoteException;
 import org.apache.hadoop.util.ReflectionUtils;
+import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
-import org.apache.hadoop.util.StringUtils;
 
 /** Provide command line access to a FileSystem. */
 public class FsShell extends Configured implements Tool {
 
-  protected FileSystem fs;
+  private FileSystem fs;
   private Trash trash;
   public static final SimpleDateFormat dateForm = 
     new SimpleDateFormat("yyyy-MM-dd HH:mm");
@@ -78,15 +79,22 @@ public class FsShell extends Configured 
   
   protected void init() throws IOException {
     getConf().setQuietMode(true);
-    if (this.fs == null) {
-     this.fs = FileSystem.get(getConf());
+  }
+  
+  protected FileSystem getFS() throws IOException {
+    if (fs == null) {
+      fs = FileSystem.get(getConf());
     }
-    if (this.trash == null) {
-      this.trash = new Trash(getConf());
+    return fs;
+  }
+  
+  protected Trash getTrash() throws IOException {
+    if (trash == null) {
+      trash = new Trash(getConf());
     }
+    return trash;
   }
 
-  
   /**
    * Copies from stdin to the indicated file.
    */
@@ -360,7 +368,9 @@ public class FsShell extends Configured 
     DataOutputBuffer outbuf;
 
     public TextRecordInputStream(FileStatus f) throws IOException {
-      r = new SequenceFile.Reader(fs, f.getPath(), getConf());
+      FileSystem pFS = f == null ? getFS() : f.getPath().getFileSystem(
+          getConf());
+      r = new SequenceFile.Reader(pFS, f.getPath(), getConf());
       key = ReflectionUtils.newInstance(r.getKeyClass().asSubclass(WritableComparable.class),
                                         getConf());
       val = ReflectionUtils.newInstance(r.getValueClass().asSubclass(Writable.class),
@@ -468,11 +478,12 @@ public class FsShell extends Configured 
       System.out.flush();
 
       boolean printWarning = false;
-      FileStatus status = fs.getFileStatus(f);
+      FileSystem pFS = f.getFileSystem(getConf());
+      FileStatus status = pFS.getFileStatus(f);
       long len = status.getLen();
 
       for(boolean done = false; !done; ) {
-        BlockLocation[] locations = fs.getFileBlockLocations(status, 0, len);
+        BlockLocation[] locations = pFS.getFileBlockLocations(status, 0, len);
         int i = 0;
         for(; i < locations.length && 
           locations[i].getHosts().length == rep; i++)
@@ -973,9 +984,10 @@ public class FsShell extends Configured 
     //
     if (argv.length > 3) {
       Path dst = new Path(dest);
-      if (!fs.isDirectory(dst)) {
-        throw new IOException("When copying multiple files, " 
-                              + "destination " + dest + " should be a directory.");
+      FileSystem pFS = dst.getFileSystem(conf);
+      if (!pFS.isDirectory(dst)) {
+        throw new IOException("When copying multiple files, " + "destination "
+            + dest + " should be a directory.");
       }
     }
     //
@@ -1081,15 +1093,15 @@ public class FsShell extends Configured 
     }
   }
   private void expunge() throws IOException {
-    trash.expunge();
-    trash.checkpoint();
+    getTrash().expunge();
+    getTrash().checkpoint();
   }
 
   /**
   * Returns the current trash directory used by this shell.
    */
-  public Path getCurrentTrashDir() {
-    return trash.getCurrentTrashDir();
+  public Path getCurrentTrashDir() throws IOException {
+    return getTrash().getCurrentTrashDir();
   }
 
   /**
@@ -1223,8 +1235,9 @@ public class FsShell extends Configured 
         errors++;
       }
       for(Path path : paths) {
+        FileStatus file = null;
         try {
-          FileStatus file = srcFs.getFileStatus(path);
+          file = srcFs.getFileStatus(path);
           if (file == null) {
             System.err.println(handler.getName() + 
                                ": could not get status for '" + path + "'");
@@ -1236,8 +1249,15 @@ public class FsShell extends Configured 
           String msg = (e.getMessage() != null ? e.getLocalizedMessage() :
             (e.getCause().getMessage() != null ? 
                 e.getCause().getLocalizedMessage() : "null"));
-          System.err.println(handler.getName() + ": could not get status for '"
-                                        + path + "': " + msg.split("\n")[0]);
+          msg = msg.split("\n")[0];
+          if (file == null) {
+            //getFileStatus fails
+            msg = ": could not get status for '" + path + "': " + msg;
+          } else {
+            //other failure
+            msg = ": failed on '" + path + "': " + msg;
+          }
+          System.err.println(handler.getName() + msg);
           errors++;
         }
       }
@@ -1783,6 +1803,7 @@ public class FsShell extends Configured 
       } else if ("-chmod".equals(cmd) || 
                  "-chown".equals(cmd) ||
                  "-chgrp".equals(cmd)) {
+        // Here fs is not used
         exitCode = FsShellPermissions.changePermissions(fs, cmd, argv, i, this);
       } else if ("-ls".equals(cmd)) {
         if (i < argv.length) {
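
The fs and trash fields above move from eager construction in init() to lazy
accessors. A minimal sketch of the same pattern in isolation, with
illustrative names:

    import java.io.IOException;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;

    class LazyFs {
      private final Configuration conf;
      private FileSystem fs;

      LazyFs(Configuration conf) { this.conf = conf; }

      // Defer the FileSystem lookup until a command actually needs it.
      synchronized FileSystem getFS() throws IOException {
        if (fs == null) {
          fs = FileSystem.get(conf);
        }
        return fs;
      }
    }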

Modified: hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/fs/FsShellPermissions.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/fs/FsShellPermissions.java?rev=1495297&r1=1495296&r2=1495297&view=diff
==============================================================================
--- hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/fs/FsShellPermissions.java (original)
+++ hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/fs/FsShellPermissions.java Fri Jun 21 06:37:27 2013
@@ -22,8 +22,8 @@ import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
 import org.apache.hadoop.fs.FsShell.CmdHandler;
-import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.fs.permission.ChmodParser;
+import org.apache.hadoop.fs.permission.FsPermission;
 
 
 /**
@@ -65,13 +65,7 @@ class FsShellPermissions {
       int newperms = pp.applyNewPermission(file);
 
       if (file.getPermission().toShort() != newperms) {
-        try {
-          srcFs.setPermission(file.getPath(), 
-                                new FsPermission((short)newperms));
-        } catch (IOException e) {
-          System.err.println(getName() + ": changing permissions of '" + 
-                             file.getPath() + "':" + e.getMessage().split("\n")[0]);
-        }
+        srcFs.setPermission(file.getPath(), new FsPermission((short)newperms));
       }
     }
   }
@@ -124,13 +118,7 @@ class FsShellPermissions {
                         null : group;
 
       if (newOwner != null || newGroup != null) {
-        try {
-          srcFs.setOwner(file.getPath(), newOwner, newGroup);
-        } catch (IOException e) {
-          System.err.println(getName() + ": changing ownership of '" + 
-                             file.getPath() + "':" + e.getMessage().split("\n")[0]);
-
-        }
+        srcFs.setOwner(file.getPath(), newOwner, newGroup);
       }
     }
   }

Modified: hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/fs/HarFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/fs/HarFileSystem.java?rev=1495297&r1=1495296&r2=1495297&view=diff
==============================================================================
--- hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/fs/HarFileSystem.java (original)
+++ hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/fs/HarFileSystem.java Fri Jun 21 06:37:27 2013
@@ -24,11 +24,11 @@ import java.net.URI;
 import java.net.URISyntaxException;
 import java.net.URLDecoder;
 import java.util.ArrayList;
-import java.util.EnumSet;
 import java.util.List;
 import java.util.Map;
 import java.util.TreeMap;
 import java.util.HashMap;
+import java.util.concurrent.ConcurrentHashMap;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.permission.FsPermission;
@@ -52,7 +52,8 @@ import org.apache.hadoop.util.Progressab
 public class HarFileSystem extends FilterFileSystem {
   public static final int VERSION = 3;
 
-  private static final Map<URI, HarMetaData> harMetaCache = new HashMap<URI, HarMetaData>();
+  private static final Map<URI, HarMetaData> harMetaCache
+    = new ConcurrentHashMap<URI, HarMetaData>();
 
   // uri representation of this Har filesystem
   private URI uri;
@@ -1050,7 +1051,7 @@ public class HarFileSystem extends Filte
       FileStatus archiveStat = fs.getFileStatus(archiveIndexPath);
       archiveIndexTimestamp = archiveStat.getModificationTime();
       LineReader aLin;
-      String retStr = null;
+
       // now start reading the real index file
       for (Store s: stores) {
         read = 0;

Added: hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/fs/HasFileDescriptor.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/fs/HasFileDescriptor.java?rev=1495297&view=auto
==============================================================================
--- hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/fs/HasFileDescriptor.java (added)
+++ hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/fs/HasFileDescriptor.java Fri Jun 21 06:37:27 2013
@@ -0,0 +1,40 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs;
+
+import java.io.FileDescriptor;
+import java.io.IOException;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
+/**
+ * Implemented by streams that can expose the FileDescriptor of the
+ * underlying file.
+ */
+@InterfaceAudience.Private
+@InterfaceStability.Evolving
+public interface HasFileDescriptor {
+
+  /**
+   * @return the FileDescriptor
+   * @throws IOException
+   */
+  public FileDescriptor getFileDescriptor() throws IOException;
+
+}
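
A hedged usage sketch: with this interface wired through FSDataInputStream and
BufferedFSInputStream (see the diffs above), a caller can recover the raw
descriptor from a local stream. The file path is illustrative:

    import java.io.FileDescriptor;
    import java.io.IOException;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FSDataInputStream;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class FdDemo {
      public static void main(String[] args) throws IOException {
        FileSystem local = FileSystem.getLocal(new Configuration()).getRawFileSystem();
        FSDataInputStream in = local.open(new Path("/tmp/example.txt"));
        try {
          // Non-null for raw local streams; null when the wrapped stream
          // has no descriptor to expose.
          FileDescriptor fd = in.getFileDescriptor();
          System.out.println("valid descriptor? " + (fd != null && fd.valid()));
        } finally {
          in.close();
        }
      }
    }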

Modified: hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/fs/Path.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/fs/Path.java?rev=1495297&r1=1495296&r2=1495297&view=diff
==============================================================================
--- hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/fs/Path.java (original)
+++ hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/fs/Path.java Fri Jun 21 06:37:27 2013
@@ -162,7 +162,7 @@ public class Path implements Comparable 
     if (path.indexOf("//") != -1) {
       path = path.replace("//", "/");
     }
-    if (path.indexOf("\\") != -1) {	
+    if (Path.WINDOWS && path.indexOf("\\") != -1) {	
       path = path.replace("\\", "/");
     }
     

Modified: hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/fs/RawLocalFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/fs/RawLocalFileSystem.java?rev=1495297&r1=1495296&r2=1495297&view=diff
==============================================================================
--- hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/fs/RawLocalFileSystem.java (original)
+++ hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/fs/RawLocalFileSystem.java Fri Jun 21 06:37:27 2013
@@ -99,7 +99,7 @@ public class RawLocalFileSystem extends 
   /*******************************************************
    * For open()'s FSInputStream
    *******************************************************/
-  class LocalFSFileInputStream extends FSInputStream {
+  class LocalFSFileInputStream extends FSInputStream implements HasFileDescriptor {
     FileInputStream fis;
     private long position;
 
@@ -168,6 +168,11 @@ public class RawLocalFileSystem extends 
       }
       return value;
     }
+
+    @Override
+    public FileDescriptor getFileDescriptor() throws IOException {
+      return fis.getFD();
+    }
   }
   
   public FSDataInputStream open(Path f, int bufferSize) throws IOException {

Modified: hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/fs/ftp/FTPFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/fs/ftp/FTPFileSystem.java?rev=1495297&r1=1495296&r2=1495297&view=diff
==============================================================================
--- hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/fs/ftp/FTPFileSystem.java (original)
+++ hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/fs/ftp/FTPFileSystem.java Fri Jun 21 06:37:27 2013
@@ -477,7 +477,7 @@ public class FTPFileSystem extends FileS
       if (created) {
         String parentDir = parent.toUri().getPath();
         client.changeWorkingDirectory(parentDir);
-        created = created & client.makeDirectory(pathName);
+        created = client.makeDirectory(pathName);
       }
     } else if (isFile(client, absolute)) {
       throw new IOException(String.format(

Modified: hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/fs/ftp/FTPInputStream.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/fs/ftp/FTPInputStream.java?rev=1495297&r1=1495296&r2=1495297&view=diff
==============================================================================
--- hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/fs/ftp/FTPInputStream.java (original)
+++ hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/fs/ftp/FTPInputStream.java Fri Jun 21 06:37:27 2013
@@ -69,7 +69,7 @@ public class FTPInputStream extends FSIn
     if (byteRead >= 0) {
       pos++;
     }
-    if (stats != null & byteRead >= 0) {
+    if (stats != null && byteRead >= 0) {
       stats.incrementBytesRead(1);
     }
     return byteRead;
@@ -84,7 +84,7 @@ public class FTPInputStream extends FSIn
     if (result > 0) {
       pos += result;
     }
-    if (stats != null & result > 0) {
+    if (stats != null && result > 0) {
       stats.incrementBytesRead(result);
     }
 

Modified: hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/fs/s3/S3InputStream.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/fs/s3/S3InputStream.java?rev=1495297&r1=1495296&r2=1495297&view=diff
==============================================================================
--- hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/fs/s3/S3InputStream.java (original)
+++ hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/fs/s3/S3InputStream.java Fri Jun 21 06:37:27 2013
@@ -103,7 +103,7 @@ class S3InputStream extends FSInputStrea
         pos++;
       }
     }
-    if (stats != null & result >= 0) {
+    if (stats != null && result >= 0) {
       stats.incrementBytesRead(1);
     }
     return result;
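
In these spots the non-short-circuiting & happened to be harmless, since
neither operand dereferences stats, so the && fix is about idiom and
short-circuit evaluation. A small sketch of the difference:

    import java.util.List;

    public class ShortCircuitDemo {
      static boolean safeCheck(List<String> list) {
        // && stops at the null check; with a single & the right-hand side
        // would still evaluate and throw NullPointerException on null.
        return list != null && !list.isEmpty();
      }

      public static void main(String[] args) {
        System.out.println(safeCheck(null));  // false, no NPE
      }
    }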

Modified: hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/fs/s3native/Jets3tNativeFileSystemStore.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/fs/s3native/Jets3tNativeFileSystemStore.java?rev=1495297&r1=1495296&r2=1495297&view=diff
==============================================================================
--- hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/fs/s3native/Jets3tNativeFileSystemStore.java (original)
+++ hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/fs/s3native/Jets3tNativeFileSystemStore.java Fri Jun 21 06:37:27 2013
@@ -24,6 +24,7 @@ import java.io.BufferedInputStream;
 import java.io.ByteArrayInputStream;
 import java.io.File;
 import java.io.FileInputStream;
+import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.io.InputStream;
 import java.net.URI;
@@ -53,10 +54,7 @@ class Jets3tNativeFileSystemStore implem
             s3Credentials.getSecretAccessKey());
       this.s3Service = new RestS3Service(awsCredentials);
     } catch (S3ServiceException e) {
-      if (e.getCause() instanceof IOException) {
-        throw (IOException) e.getCause();
-      }
-      throw new S3Exception(e);
+      handleServiceException(e);
     }
     bucket = new S3Bucket(uri.getHost());
   }
@@ -76,10 +74,7 @@ class Jets3tNativeFileSystemStore implem
       }
       s3Service.putObject(bucket, object);
     } catch (S3ServiceException e) {
-      if (e.getCause() instanceof IOException) {
-        throw (IOException) e.getCause();
-      }
-      throw new S3Exception(e);
+      handleServiceException(e);
     } finally {
       if (in != null) {
         try {
@@ -99,10 +94,7 @@ class Jets3tNativeFileSystemStore implem
       object.setContentLength(0);
       s3Service.putObject(bucket, object);
     } catch (S3ServiceException e) {
-      if (e.getCause() instanceof IOException) {
-        throw (IOException) e.getCause();
-      }
-      throw new S3Exception(e);
+      handleServiceException(e);
     }
   }
   
@@ -116,10 +108,8 @@ class Jets3tNativeFileSystemStore implem
       if (e.getMessage().contains("ResponseCode=404")) {
         return null;
       }
-      if (e.getCause() instanceof IOException) {
-        throw (IOException) e.getCause();
-      }
-      throw new S3Exception(e);
+      handleServiceException(e);
+      return null; //never returned - keep compiler happy
     }
   }
   
@@ -128,13 +118,8 @@ class Jets3tNativeFileSystemStore implem
       S3Object object = s3Service.getObject(bucket, key);
       return object.getDataInputStream();
     } catch (S3ServiceException e) {
-      if ("NoSuchKey".equals(e.getS3ErrorCode())) {
-        return null;
-      }
-      if (e.getCause() instanceof IOException) {
-        throw (IOException) e.getCause();
-      }
-      throw new S3Exception(e);
+      handleServiceException(key, e);
+      return null; //never returned - keep compiler happy
     }
   }
   
@@ -145,32 +130,22 @@ class Jets3tNativeFileSystemStore implem
                                             null, byteRangeStart, null);
       return object.getDataInputStream();
     } catch (S3ServiceException e) {
-      if ("NoSuchKey".equals(e.getS3ErrorCode())) {
-        return null;
-      }
-      if (e.getCause() instanceof IOException) {
-        throw (IOException) e.getCause();
-      }
-      throw new S3Exception(e);
+      handleServiceException(key, e);
+      return null; //never returned - keep compiler happy
     }
   }
 
   public PartialListing list(String prefix, int maxListingLength)
     throws IOException {
-    return list(prefix, maxListingLength, null);
+    return list(prefix, maxListingLength, null, false);
   }
   
-  public PartialListing list(String prefix, int maxListingLength,
-      String priorLastKey) throws IOException {
+  public PartialListing list(String prefix, int maxListingLength, String priorLastKey,
+      boolean recurse) throws IOException {
 
-    return list(prefix, PATH_DELIMITER, maxListingLength, priorLastKey);
+    return list(prefix, recurse ? null : PATH_DELIMITER, maxListingLength, priorLastKey);
   }
 
-  public PartialListing listAll(String prefix, int maxListingLength,
-      String priorLastKey) throws IOException {
-
-    return list(prefix, null, maxListingLength, priorLastKey);
-  }
 
   private PartialListing list(String prefix, String delimiter,
       int maxListingLength, String priorLastKey) throws IOException {
@@ -191,10 +166,8 @@ class Jets3tNativeFileSystemStore implem
       return new PartialListing(chunk.getPriorLastKey(), fileMetadata,
           chunk.getCommonPrefixes());
     } catch (S3ServiceException e) {
-      if (e.getCause() instanceof IOException) {
-        throw (IOException) e.getCause();
-      }
-      throw new S3Exception(e);
+      handleServiceException(e);
+      return null; //never returned - keep compiler happy
     }
   }
 
@@ -202,36 +175,27 @@ class Jets3tNativeFileSystemStore implem
     try {
       s3Service.deleteObject(bucket, key);
     } catch (S3ServiceException e) {
-      if (e.getCause() instanceof IOException) {
-        throw (IOException) e.getCause();
-      }
-      throw new S3Exception(e);
+      handleServiceException(key, e);
     }
   }
   
-  public void rename(String srcKey, String dstKey) throws IOException {
+  public void copy(String srcKey, String dstKey) throws IOException {
     try {
-      s3Service.moveObject(bucket.getName(), srcKey, bucket.getName(),
+      s3Service.copyObject(bucket.getName(), srcKey, bucket.getName(),
           new S3Object(dstKey), false);
     } catch (S3ServiceException e) {
-      if (e.getCause() instanceof IOException) {
-        throw (IOException) e.getCause();
-      }
-      throw new S3Exception(e);
+      handleServiceException(srcKey, e);
     }
   }
 
   public void purge(String prefix) throws IOException {
     try {
       S3Object[] objects = s3Service.listObjects(bucket, prefix, null);
-      for (int i = 0; i < objects.length; i++) {
-        s3Service.deleteObject(bucket, objects[i].getKey());
+      for (S3Object object : objects) {
+        s3Service.deleteObject(bucket, object.getKey());
       }
     } catch (S3ServiceException e) {
-      if (e.getCause() instanceof IOException) {
-        throw (IOException) e.getCause();
-      }
-      throw new S3Exception(e);
+      handleServiceException(e);
     }
   }
 
@@ -240,16 +204,29 @@ class Jets3tNativeFileSystemStore implem
     sb.append(bucket.getName()).append("\n");
     try {
       S3Object[] objects = s3Service.listObjects(bucket);
-      for (int i = 0; i < objects.length; i++) {
-        sb.append(objects[i].getKey()).append("\n");
+      for (S3Object object : objects) {
+        sb.append(object.getKey()).append("\n");
       }
     } catch (S3ServiceException e) {
-      if (e.getCause() instanceof IOException) {
-        throw (IOException) e.getCause();
-      }
-      throw new S3Exception(e);
+      handleServiceException(e);
     }
     System.out.println(sb);
   }
-  
+
+  private void handleServiceException(String key, S3ServiceException e) throws IOException {
+    if ("NoSuchKey".equals(e.getS3ErrorCode())) {
+      throw new FileNotFoundException("Key '" + key + "' does not exist in S3");
+    } else {
+      handleServiceException(e);
+    }
+  }
+
+  private void handleServiceException(S3ServiceException e) throws IOException {
+    if (e.getCause() instanceof IOException) {
+      throw (IOException) e.getCause();
+    } else {
+      throw new S3Exception(e);
+    }
+  }
 }

Modified: hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/fs/s3native/NativeFileSystemStore.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/fs/s3native/NativeFileSystemStore.java?rev=1495297&r1=1495296&r2=1495297&view=diff
==============================================================================
--- hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/fs/s3native/NativeFileSystemStore.java (original)
+++ hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/fs/s3native/NativeFileSystemStore.java Fri Jun 21 06:37:27 2013
@@ -42,14 +42,12 @@ interface NativeFileSystemStore {
   InputStream retrieve(String key, long byteRangeStart) throws IOException;
   
   PartialListing list(String prefix, int maxListingLength) throws IOException;
-  PartialListing list(String prefix, int maxListingLength, String priorLastKey)
+  PartialListing list(String prefix, int maxListingLength, String priorLastKey, boolean recursive)
     throws IOException;
-  PartialListing listAll(String prefix, int maxListingLength,
-      String priorLastKey) throws IOException;
   
   void delete(String key) throws IOException;
 
-  void rename(String srcKey, String dstKey) throws IOException;
+  void copy(String srcKey, String dstKey) throws IOException;
   
   /**
    * Delete all keys with the given prefix. Used for testing.
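
The listAll() variant folds into list() with a recursive flag. A hedged sketch
of the paging contract a caller follows (store, prefix, and process() are
assumed to be in scope), mirroring the delete loop in NativeS3FileSystem
below:

    String priorLastKey = null;
    do {
      PartialListing listing = store.list(prefix, 1000, priorLastKey, true);
      for (FileMetadata file : listing.getFiles()) {
        process(file.getKey());
      }
      priorLastKey = listing.getPriorLastKey();
    } while (priorLastKey != null);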

Modified: hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/fs/s3native/NativeS3FileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/fs/s3native/NativeS3FileSystem.java?rev=1495297&r1=1495296&r2=1495297&view=diff
==============================================================================
--- hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/fs/s3native/NativeS3FileSystem.java (original)
+++ hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/fs/s3native/NativeS3FileSystem.java Fri Jun 21 06:37:27 2013
@@ -61,6 +61,17 @@ import org.apache.hadoop.util.Progressab
  * Unlike {@link org.apache.hadoop.fs.s3.S3FileSystem} this implementation
  * stores files on S3 in their
  * native form so they can be read by other S3 tools.
+ *
+ * A note about directories. S3 of course has no "native" support for them.
+ * The idiom we choose then is: for any directory created by this class,
+ * we use an empty object "#{dirpath}_$folder$" as a marker.
+ * Further, to interoperate with other S3 tools, we also accept the following:
+ *  - an object "#{dirpath}/" denoting a directory marker
+ *  - if there exist any objects with the prefix "#{dirpath}/", then the
+ *    directory is said to exist
+ *  - if both a file with the name of a directory and a marker for that
+ *    directory exist, then the *file masks the directory*, and the directory
+ *    is never returned.
  * </p>
  * @see org.apache.hadoop.fs.s3.S3FileSystem
  */
@@ -85,6 +96,7 @@ public class NativeS3FileSystem extends 
       this.key = key;
     }
     
+    @Override
     public synchronized int read() throws IOException {
       int result = in.read();
       if (result != -1) {
@@ -92,6 +104,7 @@ public class NativeS3FileSystem extends 
       }
       return result;
     }
+    @Override
     public synchronized int read(byte[] b, int off, int len)
       throws IOException {
       
@@ -102,18 +115,23 @@ public class NativeS3FileSystem extends 
       return result;
     }
 
+    @Override
     public void close() throws IOException {
       in.close();
     }
 
+    @Override
     public synchronized void seek(long pos) throws IOException {
       in.close();
+      LOG.info("Opening key '" + key + "' for reading at position '" + pos + "'");
       in = store.retrieve(key, pos);
       this.pos = pos;
     }
+    @Override
     public synchronized long getPos() throws IOException {
       return pos;
     }
+    @Override
     public boolean seekToNewSource(long targetPos) throws IOException {
       return false;
     }
@@ -134,6 +152,7 @@ public class NativeS3FileSystem extends 
       this.conf = conf;
       this.key = key;
       this.backupFile = newBackupFile();
+      LOG.info("OutputStream for key '" + key + "' writing to tempfile '" + this.backupFile + "'");
       try {
         this.digest = MessageDigest.getInstance("MD5");
         this.backupStream = new BufferedOutputStream(new DigestOutputStream(
@@ -168,6 +187,7 @@ public class NativeS3FileSystem extends 
       }
 
       backupStream.close();
+      LOG.info("OutputStream for key '" + key + "' closed. Now beginning upload");
       
       try {
         byte[] md5Hash = digest == null ? null : digest.digest();
@@ -179,7 +199,7 @@ public class NativeS3FileSystem extends 
         super.close();
         closed = true;
       } 
-
+      LOG.info("OutputStream for key '" + key + "' upload complete");
     }
 
     @Override
@@ -191,8 +211,6 @@ public class NativeS3FileSystem extends 
     public void write(byte[] b, int off, int len) throws IOException {
       backupStream.write(b, off, len);
     }
-    
-    
   }
   
   private URI uri;
@@ -236,6 +254,7 @@ public class NativeS3FileSystem extends 
     Map<String, RetryPolicy> methodNameToPolicyMap =
       new HashMap<String, RetryPolicy>();
     methodNameToPolicyMap.put("storeFile", methodPolicy);
+    methodNameToPolicyMap.put("rename", methodPolicy);
     
     return (NativeFileSystemStore)
       RetryProxy.create(NativeFileSystemStore.class, store,
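
The hunk above simply adds "rename" to the retry policy already applied to
"storeFile". For reference, the surrounding RetryProxy idiom looks roughly
like this; the policy values are illustrative and rawStore stands in for the
unproxied store instance built outside this hunk:

    // Illustrative policy: retry a few times with a fixed sleep, then fail.
    RetryPolicy methodPolicy = RetryPolicies.retryUpToMaximumCountWithFixedSleep(
        4, 10, TimeUnit.SECONDS);
    Map<String, RetryPolicy> methodNameToPolicyMap =
        new HashMap<String, RetryPolicy>();
    methodNameToPolicyMap.put("storeFile", methodPolicy);
    methodNameToPolicyMap.put("rename", methodPolicy);
    NativeFileSystemStore store = (NativeFileSystemStore) RetryProxy.create(
        NativeFileSystemStore.class, rawStore, methodNameToPolicyMap);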
@@ -246,7 +265,11 @@ public class NativeS3FileSystem extends 
     if (!path.isAbsolute()) {
       throw new IllegalArgumentException("Path must be absolute: " + path);
     }
-    return path.toUri().getPath().substring(1); // remove initial slash
+    String ret = path.toUri().getPath().substring(1); // remove initial slash
+    if (ret.endsWith("/") && (ret.indexOf("/") != ret.length() - 1)) {
+      ret = ret.substring(0, ret.length() - 1);
+    }
+    return ret;
   }
   
   private static Path keyToPath(String key) {
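
The new trailing-slash guard in pathToKey is easy to misread; traced on the
intermediate string ret it behaves as follows (a worked trace, not code from
the patch):

    // ret = "a/b/": ends with "/" and indexOf("/") == 1 != 3 == length()-1,
    //               so the trailing slash is stripped        -> "a/b"
    // ret = "dir/": indexOf("/") == 3 == length()-1, i.e. the only slash is
    //               the trailing one, so the guard skips it  -> "dir/"
    // ret = "a/b" : no trailing slash                        -> unchanged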
@@ -261,6 +284,7 @@ public class NativeS3FileSystem extends 
   }
 
   /** This optional operation is not yet supported. */
+  @Override
   public FSDataOutputStream append(Path f, int bufferSize,
       Progressable progress) throws IOException {
     throw new IOException("Not supported");
@@ -287,27 +311,41 @@ public class NativeS3FileSystem extends 
   }
 
   @Override
-  public boolean delete(Path f, boolean recursive) throws IOException {
+  public boolean delete(Path f, boolean recurse) throws IOException {
     FileStatus status;
     try {
       status = getFileStatus(f);
     } catch (FileNotFoundException e) {
+      LOG.debug("Delete called for '" + f + "' but file does not exist, so returning false");
       return false;
     }
     Path absolutePath = makeAbsolute(f);
     String key = pathToKey(absolutePath);
     if (status.isDir()) {
-      FileStatus[] contents = listStatus(f);
-      if (!recursive && contents.length > 0) {
-        throw new IOException("Directory " + f.toString() + " is not empty.");
+      if (!recurse && listStatus(f).length > 0) {
+        throw new IOException("Can not delete " + f + " at is a not empty directory and recurse option is false");
       }
-      for (FileStatus p : contents) {
-        if (!delete(p.getPath(), recursive)) {
-          return false;
+
+      createParent(f);
+
+      LOG.debug("Deleting directory '" + f  + "'");
+      String priorLastKey = null;
+      do {
+        PartialListing listing = store.list(key, S3_MAX_LISTING_LENGTH, priorLastKey, true);
+        for (FileMetadata file : listing.getFiles()) {
+          store.delete(file.getKey());
         }
+        priorLastKey = listing.getPriorLastKey();
+      } while (priorLastKey != null);
+
+      try {
+        store.delete(key + FOLDER_SUFFIX);
+      } catch (FileNotFoundException e) {
+        // this is fine, we don't require a marker
       }
-      store.delete(key + FOLDER_SUFFIX);
     } else {
+      LOG.debug("Deleting file '" + f + "'");
+      createParent(f);
       store.delete(key);
     }
     return true;
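
The directory branch above is the first of several uses of the same paging
protocol: store.list returns a PartialListing plus a priorLastKey cursor, and
callers loop until the cursor comes back null. The idiom in isolation (a
sketch assuming the store and S3_MAX_LISTING_LENGTH of this class; process()
is a hypothetical per-key callback):

    String priorLastKey = null;
    do {
      PartialListing listing =
          store.list(key, S3_MAX_LISTING_LENGTH, priorLastKey, true);
      for (FileMetadata file : listing.getFiles()) {
        process(file.getKey());  // hypothetical callback
      }
      priorLastKey = listing.getPriorLastKey();
    } while (priorLastKey != null);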
@@ -315,7 +353,6 @@ public class NativeS3FileSystem extends 
 
   @Override
   public FileStatus getFileStatus(Path f) throws IOException {
-    
     Path absolutePath = makeAbsolute(f);
     String key = pathToKey(absolutePath);
     
@@ -323,23 +360,28 @@ public class NativeS3FileSystem extends 
       return newDirectory(absolutePath);
     }
     
+    LOG.debug("getFileStatus retrieving metadata for key '" + key + "'");
     FileMetadata meta = store.retrieveMetadata(key);
     if (meta != null) {
+      LOG.debug("getFileStatus returning 'file' for key '" + key + "'");
       return newFile(meta, absolutePath);
     }
     if (store.retrieveMetadata(key + FOLDER_SUFFIX) != null) {
+      LOG.debug("getFileStatus returning 'directory' for key '" + key + "' as '"
+          + key + FOLDER_SUFFIX + "' exists");
       return newDirectory(absolutePath);
     }
     
+    LOG.debug("getFileStatus listing key '" + key + "'");
     PartialListing listing = store.list(key, 1);
     if (listing.getFiles().length > 0 ||
         listing.getCommonPrefixes().length > 0) {
+      LOG.debug("getFileStatus returning 'directory' for key '" + key + "' as it has contents");
       return newDirectory(absolutePath);
     }
     
-    throw new FileNotFoundException(absolutePath +
-        ": No such file or directory.");
-    
+    LOG.debug("getFileStatus could not find key '" + key + "'");
+    throw new FileNotFoundException("No such file or directory '" + absolutePath + "'");
   }
 
   @Override
@@ -372,16 +414,20 @@ public class NativeS3FileSystem extends 
     Set<FileStatus> status = new TreeSet<FileStatus>();
     String priorLastKey = null;
     do {
-      PartialListing listing = store.list(key, S3_MAX_LISTING_LENGTH, 
-          priorLastKey);
+      PartialListing listing = store.list(key, S3_MAX_LISTING_LENGTH, priorLastKey, false);
       for (FileMetadata fileMetadata : listing.getFiles()) {
         Path subpath = keyToPath(fileMetadata.getKey());
         String relativePath = pathUri.relativize(subpath.toUri()).getPath();
-        if (relativePath.endsWith(FOLDER_SUFFIX)) {
-          status.add(newDirectory(new Path(absolutePath,
-              relativePath.substring(0,
-                  relativePath.indexOf(FOLDER_SUFFIX)))));
-        } else {
+
+        if (fileMetadata.getKey().equals(key + "/")) {
+          // this is just the directory we have been asked to list
+        }
+        else if (relativePath.endsWith(FOLDER_SUFFIX)) {
+          status.add(newDirectory(new Path(
+              absolutePath,
+              relativePath.substring(0, relativePath.indexOf(FOLDER_SUFFIX)))));
+        }
+        else {
           status.add(newFile(fileMetadata, subpath));
         }
       }
@@ -398,7 +444,7 @@ public class NativeS3FileSystem extends 
       return null;
     }
     
-    return status.toArray(new FileStatus[0]);
+    return status.toArray(new FileStatus[status.size()]);
   }
   
   private FileStatus newFile(FileMetadata meta, Path path) {
@@ -432,10 +478,11 @@ public class NativeS3FileSystem extends 
       FileStatus fileStatus = getFileStatus(f);
       if (!fileStatus.isDir()) {
         throw new IOException(String.format(
-            "Can't make directory for path %s since it is a file.", f));
+            "Can't make directory for path '%s' since it is a file.", f));
 
       }
     } catch (FileNotFoundException e) {
+      LOG.debug("Making dir '" + f + "' in S3");
       String key = pathToKey(f) + FOLDER_SUFFIX;
       store.storeEmptyFile(key);    
     }
@@ -444,9 +491,11 @@ public class NativeS3FileSystem extends 
 
   @Override
   public FSDataInputStream open(Path f, int bufferSize) throws IOException {
-    if (!exists(f)) {
-      throw new FileNotFoundException(f.toString());
+    FileStatus fs = getFileStatus(f); // will throw if the file doesn't exist
+    if (fs.isDir()) {
+      throw new IOException("'" + f + "' is a directory");
     }
+    LOG.info("Opening '" + f + "' for reading");
     Path absolutePath = makeAbsolute(f);
     String key = pathToKey(absolutePath);
     return new FSDataInputStream(new BufferedFSInputStream(
@@ -456,47 +505,16 @@ public class NativeS3FileSystem extends 
   // rename() and delete() use this method to ensure that the parent directory
   // of the source does not vanish.
   private void createParent(Path path) throws IOException {
-      Path parent = path.getParent();
-      if (parent != null) {
-          String key = pathToKey(makeAbsolute(parent));
-          if (key.length() > 0) {
-              store.storeEmptyFile(key + FOLDER_SUFFIX);
-          }
+    Path parent = path.getParent();
+    if (parent != null) {
+      String key = pathToKey(makeAbsolute(parent));
+      if (key.length() > 0) {
+          store.storeEmptyFile(key + FOLDER_SUFFIX);
       }
+    }
   }
   
-  private boolean existsAndIsFile(Path f) throws IOException {
-    
-    Path absolutePath = makeAbsolute(f);
-    String key = pathToKey(absolutePath);
     
-    if (key.length() == 0) {
-        return false;
-    }
-    
-    FileMetadata meta = store.retrieveMetadata(key);
-    if (meta != null) {
-        // S3 object with given key exists, so this is a file
-        return true;
-    }
-    
-    if (store.retrieveMetadata(key + FOLDER_SUFFIX) != null) {
-        // Signifies empty directory
-        return false;
-    }
-    
-    PartialListing listing = store.list(key, 1, null);
-    if (listing.getFiles().length > 0 ||
-        listing.getCommonPrefixes().length > 0) {
-        // Non-empty directory
-        return false;
-    }
-    
-    throw new FileNotFoundException(absolutePath +
-        ": No such file or directory");
-}
-
-
   @Override
   public boolean rename(Path src, Path dst) throws IOException {
 
@@ -507,60 +525,74 @@ public class NativeS3FileSystem extends 
       return false;
     }
 
+    final String debugPreamble = "Renaming '" + src + "' to '" + dst + "' - ";
+
     // Figure out the final destination
     String dstKey;
     try {
-      boolean dstIsFile = existsAndIsFile(dst);
+      boolean dstIsFile = !getFileStatus(dst).isDir();
       if (dstIsFile) {
-        // Attempting to overwrite a file using rename()
+        LOG.debug(debugPreamble + "returning false as dst is an already existing file");
         return false;
       } else {
-        // Move to within the existent directory
+        LOG.debug(debugPreamble + "using dst as output directory");
         dstKey = pathToKey(makeAbsolute(new Path(dst, src.getName())));
       }
     } catch (FileNotFoundException e) {
-      // dst doesn't exist, so we can proceed
+      LOG.debug(debugPreamble + "using dst as output destination");
       dstKey = pathToKey(makeAbsolute(dst));
       try {
         if (!getFileStatus(dst.getParent()).isDir()) {
-          return false; // parent dst is a file
+          LOG.debug(debugPreamble + "returning false as dst parent exists and is a file");
+          return false;
         }
       } catch (FileNotFoundException ex) {
-        return false; // parent dst does not exist
+        LOG.debug(debugPreamble + "returning false as dst parent does not exist");
+        return false;
       }
     }
 
+    boolean srcIsFile;
     try {
-      boolean srcIsFile = existsAndIsFile(src);
-      if (srcIsFile) {
-        store.rename(srcKey, dstKey);
-      } else {
-        // Move the folder object
-        store.delete(srcKey + FOLDER_SUFFIX);
-        store.storeEmptyFile(dstKey + FOLDER_SUFFIX);
+      srcIsFile = !getFileStatus(src).isDir();
+    } catch (FileNotFoundException e) {
+      LOG.debug(debugPreamble + "returning false as src does not exist");
+      return false;
+    }
+    if (srcIsFile) {
+      LOG.debug(debugPreamble + "src is file, so doing copy then delete in S3");
+      store.copy(srcKey, dstKey);
+      store.delete(srcKey);
+    } else {
+      LOG.debug(debugPreamble + "src is directory, so copying contents");
+      store.storeEmptyFile(dstKey + FOLDER_SUFFIX);
 
-        // Move everything inside the folder
-        String priorLastKey = null;
-        do {
-          PartialListing listing = store.listAll(srcKey, S3_MAX_LISTING_LENGTH,
-              priorLastKey);
-          for (FileMetadata file : listing.getFiles()) {
-            store.rename(file.getKey(), dstKey
-                + file.getKey().substring(srcKey.length()));
-          }
-          priorLastKey = listing.getPriorLastKey();
-        } while (priorLastKey != null);
-      }
+      List<String> keysToDelete = new ArrayList<String>();
+      String priorLastKey = null;
+      do {
+        PartialListing listing = store.list(srcKey, S3_MAX_LISTING_LENGTH, priorLastKey, true);
+        for (FileMetadata file : listing.getFiles()) {
+          keysToDelete.add(file.getKey());
+          store.copy(file.getKey(), dstKey + file.getKey().substring(srcKey.length()));
+        }
+        priorLastKey = listing.getPriorLastKey();
+      } while (priorLastKey != null);
 
-      createParent(src);
-      return true;
+      LOG.debug(debugPreamble + "all files in src copied, now removing src files");
+      for (String key: keysToDelete) {
+        store.delete(key);
+      }
 
-    } catch (FileNotFoundException e) {
-      // Source file does not exist;
-      return false;
+      try {
+        store.delete(srcKey + FOLDER_SUFFIX);
+      } catch (FileNotFoundException e) {
+        // this is fine, we don't require a marker
+      }
+      LOG.debug(debugPreamble + "done");
     }
-  }
 
+    return true;
+  }
 
   /**
    * Set the working directory to the given directory.
@@ -574,5 +606,4 @@ public class NativeS3FileSystem extends 
   public Path getWorkingDirectory() {
     return workingDir;
   }
-
 }
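
Two caller-visible properties of the rewritten rename() are worth keeping in
mind: it reports every failure mode by returning false rather than throwing,
and a directory move is copy-everything-then-delete-everything, so it is not
atomic. A caller-side sketch (fs, src and dst are illustrative):

    // rename() signals failure through its return value; check it.
    // A crash mid-rename of a directory can leave both source and
    // destination partially populated.
    if (!fs.rename(src, dst)) {
      throw new IOException("rename failed: " + src + " -> " + dst);
    }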

Modified: hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/http/HttpServer.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/http/HttpServer.java?rev=1495297&r1=1495296&r2=1495297&view=diff
==============================================================================
--- hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/http/HttpServer.java (original)
+++ hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/http/HttpServer.java Fri Jun 21 06:37:27 2013
@@ -42,6 +42,7 @@ import javax.servlet.http.HttpServletRes
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.ConfServlet;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.jmx.JMXJsonServlet;
@@ -86,11 +87,13 @@ public class HttpServer implements Filte
 
   static final String FILTER_INITIALIZER_PROPERTY
       = "hadoop.http.filter.initializers";
-
+ 
   // The ServletContext attribute where the daemon Configuration
   // gets stored.
-  static final String CONF_CONTEXT_ATTRIBUTE = "hadoop.conf";
+  public static final String CONF_CONTEXT_ATTRIBUTE = "hadoop.conf";
   static final String ADMINS_ACL = "admins.acl";
+  public static final String SPNEGO_FILTER = "SpnegoFilter";
+  public static final String KRB5_FILTER = "krb5Filter";
 
   private AccessControlList adminsAcl;
 
@@ -174,7 +177,7 @@ public class HttpServer implements Filte
 
     addDefaultApps(contexts, appDir);
     
-    defineFilter(webAppContext, "krb5Filter", 
+    defineFilter(webAppContext, KRB5_FILTER, 
         Krb5AndCertsSslSocketConnector.Krb5SslFilter.class.getName(), 
         null, null);
     
@@ -247,6 +250,12 @@ public class HttpServer implements Filte
       Context logContext = new Context(parent, "/logs");
       logContext.setResourceBase(logDir);
       logContext.addServlet(AdminAuthorizedServlet.class, "/");
+      if (conf.getBoolean(
+          CommonConfigurationKeys.HADOOP_JETTY_LOGS_SERVE_ALIASES,
+          CommonConfigurationKeys.DEFAULT_HADOOP_JETTY_LOGS_SERVE_ALIASES)) {
+        logContext.getInitParams().put(
+            "org.mortbay.jetty.servlet.Default.aliases", "true");
+      }
       logContext.setDisplayName("logs");
       setContextAttributes(logContext);
       defaultContexts.put(logContext, true);
@@ -273,6 +282,7 @@ public class HttpServer implements Filte
     addServlet("stacks", "/stacks", StackServlet.class);
     addServlet("logLevel", "/logLevel", LogLevel.Servlet.class);
     addServlet("jmx", "/jmx", JMXJsonServlet.class);
+    addServlet("conf", "/conf", ConfServlet.class);
   }
 
   public void addContext(Context ctxt, boolean isFiltered)
@@ -343,7 +353,7 @@ public class HttpServer implements Filte
    */
   public void addServlet(String name, String pathSpec,
       Class<? extends HttpServlet> clazz) {
-    addInternalServlet(name, pathSpec, clazz, false);
+    addInternalServlet(name, pathSpec, clazz, false, false);
     addFilterPathMapping(pathSpec, webAppContext);
   }
 
@@ -357,7 +367,7 @@ public class HttpServer implements Filte
   @Deprecated
   public void addInternalServlet(String name, String pathSpec,
       Class<? extends HttpServlet> clazz) {
-    addInternalServlet(name, pathSpec, clazz, false);
+    addInternalServlet(name, pathSpec, clazz, false, false);
   }
 
   /**
@@ -365,15 +375,18 @@ public class HttpServer implements Filte
    * protect with Kerberos authentication. 
    * Note: This method is to be used for adding servlets that facilitate
    * internal communication and not for user facing functionality. For
-   * servlets added using this method, filters (except internal Kerberized
+   * servlets added using this method, filters (except internal Kerberos
    * filters) are not enabled. 
    * 
    * @param name The name of the servlet (can be passed as null)
    * @param pathSpec The path spec for the servlet
    * @param clazz The servlet class
+   * @param requireAuth Require Kerberos authentication to access the servlet
+   * @param useKsslForAuth true to use KSSL for auth, false to use SPNEGO
    */
   public void addInternalServlet(String name, String pathSpec, 
-      Class<? extends HttpServlet> clazz, boolean requireAuth) {
+      Class<? extends HttpServlet> clazz, boolean requireAuth,
+      boolean useKsslForAuth) {
     ServletHolder holder = new ServletHolder(clazz);
     if (name != null) {
       holder.setName(name);
@@ -381,11 +394,16 @@ public class HttpServer implements Filte
     webAppContext.addServlet(holder, pathSpec);
     
     if(requireAuth && UserGroupInformation.isSecurityEnabled()) {
-       LOG.info("Adding Kerberos filter to " + name);
        ServletHandler handler = webAppContext.getServletHandler();
        FilterMapping fmap = new FilterMapping();
        fmap.setPathSpec(pathSpec);
-       fmap.setFilterName("krb5Filter");
+       if (useKsslForAuth) {
+         LOG.info("Adding Kerberos (KSSL) filter to " + name);
+         fmap.setFilterName(KRB5_FILTER);
+       } else {
+         LOG.info("Adding Kerberos (SPNEGO) filter to " + name);
+         fmap.setFilterName(SPNEGO_FILTER);
+       }
        fmap.setDispatches(Handler.ALL);
        handler.addFilterMapping(fmap);
     }
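
With the extended signature, callers choose the Kerberos transport per
servlet. A hypothetical registration (servlet class and path are
illustrative, not taken from this patch):

    // requireAuth = true, useKsslForAuth = true  -> KRB5_FILTER (KSSL)
    // requireAuth = true, useKsslForAuth = false -> SPNEGO_FILTER
    httpServer.addInternalServlet("getimage", "/getimage",
        GetImageServlet.class, true /* requireAuth */, true /* KSSL */);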
@@ -645,6 +663,24 @@ public class HttpServer implements Filte
           listener.setPort((oriPort += 1));
         }
       }
+      // Make sure there are no handler failures.
+      Handler[] handlers = webServer.getHandlers();
+      for (int i = 0; i < handlers.length; i++) {
+        if (handlers[i].isFailed()) {
+          throw new IOException(
+              "Problem in starting http server. Server handlers failed");
+        }
+      }
+      
+      // Make sure there are no errors initializing the context.
+      Throwable unavailableException = webAppContext.getUnavailableException();
+      if (unavailableException != null) {
+        // Have to stop the webserver, or else its non-daemon threads
+        // will hang forever.
+        webServer.stop();
+        throw new IOException("Unable to initialize WebAppContext",
+            unavailableException);
+      }
     } catch (IOException e) {
       throw e;
     } catch (Exception e) {
@@ -665,6 +701,37 @@ public class HttpServer implements Filte
   }
 
   /**
+   * Checks whether the user has privileges to access the instrumentation
+   * servlets.
+   * <p/>
+   * If <code>hadoop.security.instrumentation.requires.admin</code> is set to 
+   * FALSE (the default) it always returns TRUE.
+   * <p/>
+   * If <code>hadoop.security.instrumentation.requires.admin</code> is set to 
+   * TRUE it will check whether the current user is in the admin ACLs. If the 
+   * user is in the admin ACLs it returns TRUE, otherwise it returns FALSE.
+   *
+   * @param servletContext the servlet context.
+   * @param request the servlet request.
+   * @param response the servlet response.
+   * @return TRUE/FALSE based on the logic described above.
+   */
+  public static boolean isInstrumentationAccessAllowed(
+      ServletContext servletContext, HttpServletRequest request,
+      HttpServletResponse response) throws IOException {
+    Configuration conf = (Configuration) servletContext
+        .getAttribute(CONF_CONTEXT_ATTRIBUTE);
+
+    boolean access = true;
+    boolean adminAccess = conf.getBoolean(
+        CommonConfigurationKeys.HADOOP_SECURITY_INSTRUMENTATION_REQUIRES_ADMIN,
+        false);
+    if (adminAccess) {
+      access = hasAdministratorAccess(servletContext, request, response);
+    }
+    return access;
+  }
+  
+  /**
    * Does the user sending the HttpServletRequest have administrator ACLs? If
    * not, the response will be modified to send an error to the user.
    * 
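
The new check is driven by the configuration key quoted in its javadoc. A
sketch of turning the admin requirement on and guarding a servlet with it;
the guard mirrors the doGet change at the end of this file's diff:

    // Require admin ACL membership for instrumentation pages; the default
    // is false, i.e. open access.
    conf.setBoolean("hadoop.security.instrumentation.requires.admin", true);

    // Inside an instrumentation servlet:
    public void doGet(HttpServletRequest request, HttpServletResponse response)
        throws ServletException, IOException {
      if (!HttpServer.isInstrumentationAccessAllowed(getServletContext(),
          request, response)) {
        return;  // the helper has already sent the error response
      }
      // ... render instrumentation output ...
    }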
@@ -688,7 +755,10 @@ public class HttpServer implements Filte
 
     String remoteUser = request.getRemoteUser();
     if (remoteUser == null) {
-      return true;
+      response.sendError(HttpServletResponse.SC_UNAUTHORIZED,
+                         "Unauthenticated users are not " +
+                         "authorized to access this page.");
+      return false;
     }
     AccessControlList adminsAcl = (AccessControlList) servletContext
         .getAttribute(ADMINS_ACL);
@@ -697,9 +767,7 @@ public class HttpServer implements Filte
     if (adminsAcl != null) {
       if (!adminsAcl.isUserAllowed(remoteUserUGI)) {
         response.sendError(HttpServletResponse.SC_UNAUTHORIZED, "User "
-            + remoteUser + " is unauthorized to access this page. "
-            + "AccessControlList for accessing this page : "
-            + adminsAcl.toString());
+            + remoteUser + " is unauthorized to access this page.");
         return false;
       }
     }
@@ -720,8 +788,8 @@ public class HttpServer implements Filte
       throws ServletException, IOException {
 
       // Do the authorization
-      if (!HttpServer.hasAdministratorAccess(getServletContext(), request,
-          response)) {
+      if (!HttpServer.isInstrumentationAccessAllowed(getServletContext(),
+          request, response)) {
         return;
       }
 

Modified: hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/io/AbstractMapWritable.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/io/AbstractMapWritable.java?rev=1495297&r1=1495296&r2=1495297&view=diff
==============================================================================
--- hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/io/AbstractMapWritable.java (original)
+++ hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/io/AbstractMapWritable.java Fri Jun 21 06:37:27 2013
@@ -43,10 +43,10 @@ public abstract class AbstractMapWritabl
   private AtomicReference<Configuration> conf;
   
   /* Class to id mappings */
-  private Map<Class, Byte> classToIdMap = new ConcurrentHashMap<Class, Byte>();
+  Map<Class, Byte> classToIdMap = new ConcurrentHashMap<Class, Byte>();
   
   /* Id to Class mappings */
-  private Map<Byte, Class> idToClassMap = new ConcurrentHashMap<Byte, Class>();
+  Map<Byte, Class> idToClassMap = new ConcurrentHashMap<Byte, Class>();
   
   /* The number of new classes (those not established by the constructor) */
   private volatile byte newClasses = 0;

Modified: hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/io/BloomMapFile.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/io/BloomMapFile.java?rev=1495297&r1=1495296&r2=1495297&view=diff
==============================================================================
--- hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/io/BloomMapFile.java (original)
+++ hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/io/BloomMapFile.java Fri Jun 21 06:37:27 2013
@@ -58,6 +58,16 @@ public class BloomMapFile {
     fs.delete(bloom, true);
     fs.delete(dir, true);
   }
+
+  private static byte[] byteArrayForBloomKey(DataOutputBuffer buf) {
+    int cleanLength = buf.getLength();
+    byte [] ba = buf.getData();
+    if (cleanLength != ba.length) {
+      ba = new byte[cleanLength];
+      System.arraycopy(buf.getData(), 0, ba, 0, cleanLength);
+    }
+    return ba;
+  }
   
   public static class Writer extends MapFile.Writer {
     private DynamicBloomFilter bloomFilter;
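
byteArrayForBloomKey exists because DataOutputBuffer.getData() returns the
whole backing array, which can be longer than the valid region reported by
getLength(); hashing the raw array would mix stale bytes from an earlier,
longer key into the bloom key. A sketch of the failure mode it guards against
(Text values are illustrative; the helper is private to BloomMapFile):

    DataOutputBuffer buf = new DataOutputBuffer();
    new Text("a-fairly-long-key").write(buf);  // grows the backing array
    buf.reset();
    new Text("ab").write(buf);   // getLength() is now small, but
                                 // getData().length still reflects the long
                                 // key, with stale bytes past getLength()
    byte[] clean = byteArrayForBloomKey(buf);  // exactly getLength() bytes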
@@ -163,7 +173,7 @@ public class BloomMapFile {
       super.append(key, val);
       buf.reset();
       key.write(buf);
-      bloomKey.set(buf.getData(), 1.0);
+      bloomKey.set(byteArrayForBloomKey(buf), 1.0);
       bloomFilter.add(bloomKey);
     }
 
@@ -228,7 +238,7 @@ public class BloomMapFile {
       }
       buf.reset();
       key.write(buf);
-      bloomKey.set(buf.getData(), 1.0);
+      bloomKey.set(byteArrayForBloomKey(buf), 1.0);
       return bloomFilter.membershipTest(bloomKey);
     }
     

Modified: hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/io/IOUtils.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/io/IOUtils.java?rev=1495297&r1=1495296&r2=1495297&view=diff
==============================================================================
--- hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/io/IOUtils.java (original)
+++ hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/io/IOUtils.java Fri Jun 21 06:37:27 2013
@@ -138,6 +138,28 @@ public class IOUtils {
     }
   }
 
+  /**
+   * Utility wrapper for reading from an {@link InputStream}. IOExceptions from
+   * the underlying stream are rethrown unchanged; any other error (for example
+   * a runtime failure inside a decompressor) is wrapped in an IOException.
+   * 
+   * @param is - InputStream to be read from
+   * @param buf - buffer the data is read into
+   * @param off - offset within buf
+   * @param len - amount of data to be read
+   * @return number of bytes read
+   */
+  public static int wrappedReadForCompressedData(InputStream is, byte[] buf,
+      int off, int len) throws IOException {
+    try {
+      return is.read(buf, off, len);
+    } catch (IOException ie) {
+      throw ie;
+    } catch (Throwable t) {
+      throw new IOException("Error while reading compressed data", t);
+    }
+  }
+
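
A usage sketch for the new helper: callers reading through a decompression
stream see a plain IOException instead of, say, a runtime error escaping a
native codec (codec, fs and path are illustrative):

    InputStream in = codec.createInputStream(fs.open(path));
    byte[] buf = new byte[64 * 1024];
    int n;
    while ((n = IOUtils.wrappedReadForCompressedData(in, buf, 0, buf.length)) != -1) {
      // ... consume buf[0..n) ...
    }
    in.close();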
   /** Reads len bytes in a loop.
    * @param in The InputStream to read from
    * @param buf The buffer to fill