Posted to common-commits@hadoop.apache.org by om...@apache.org on 2009/02/10 19:05:09 UTC

svn commit: r743035 - in /hadoop/core/trunk: ./ bin/ src/core/org/apache/hadoop/fs/s3/ src/core/org/apache/hadoop/fs/s3native/ src/hdfs/org/apache/hadoop/hdfs/ src/test/org/apache/hadoop/fs/

Author: omalley
Date: Tue Feb 10 18:05:08 2009
New Revision: 743035

URL: http://svn.apache.org/viewvc?rev=743035&view=rev
Log:
HADOOP-5212. Fix the path translation problem introduced by HADOOP-4868
when running on cygwin. (Sharad Agarwal via omalley)
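
For context on the fix below: hadoop-config.sh builds CLASSPATH as a colon-separated
list of Unix-style paths, which the Windows JVM under cygwin cannot parse, so the
launcher scripts now convert the finished list with cygpath immediately before
exec'ing java. A minimal sketch of what that conversion does (the paths here are
hypothetical, and the Windows prefix depends on where Cygwin is mounted):

    # colon-separated Unix-style list, as the scripts assemble it
    CLASSPATH=/usr/local/hadoop/conf:/usr/local/hadoop/hadoop-core.jar
    # -p converts a whole PATH-style list; -w emits Windows-style paths
    CLASSPATH=`cygpath -p -w "$CLASSPATH"`
    echo "$CLASSPATH"
    # C:\cygwin\usr\local\hadoop\conf;C:\cygwin\usr\local\hadoop\hadoop-core.jar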

Modified:
    hadoop/core/trunk/CHANGES.txt
    hadoop/core/trunk/bin/hadoop
    hadoop/core/trunk/bin/hadoop-config.sh
    hadoop/core/trunk/bin/hdfs
    hadoop/core/trunk/bin/mapred
    hadoop/core/trunk/src/core/org/apache/hadoop/fs/s3/S3InputStream.java
    hadoop/core/trunk/src/core/org/apache/hadoop/fs/s3/S3OutputStream.java
    hadoop/core/trunk/src/core/org/apache/hadoop/fs/s3native/NativeS3FileSystem.java
    hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/DFSClient.java
    hadoop/core/trunk/src/test/org/apache/hadoop/fs/FileSystemContractBaseTest.java

Modified: hadoop/core/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/CHANGES.txt?rev=743035&r1=743034&r2=743035&view=diff
==============================================================================
--- hadoop/core/trunk/CHANGES.txt (original)
+++ hadoop/core/trunk/CHANGES.txt Tue Feb 10 18:05:08 2009
@@ -92,8 +92,8 @@
     tools, and example jars. Let findbugs depend on this rather than the 'tar'
     target. (Giridharan Kesavan via cdouglas)
 
-    HADOOP-4868. Splits the hadoop script into three parts - bin/hadoop, bin/mapred and 
-    bin/hdfs. (Sharad Agarwal via ddas)
+    HADOOP-4868. Splits the hadoop script into three parts - bin/hadoop, 
+    bin/mapred and bin/hdfs. (Sharad Agarwal via ddas)
 
   OPTIMIZATIONS
 
@@ -151,6 +151,9 @@
     task-controller be an independent target in build.xml.
     (Sreekanth Ramakrishnan via yhemanth)
 
+    HADOOP-5212. Fix the path translation problem introduced by HADOOP-4868
+    when running on cygwin. (Sharad Agarwal via omalley)
+    
 Release 0.20.0 - Unreleased
 
   INCOMPATIBLE CHANGES

Modified: hadoop/core/trunk/bin/hadoop
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/bin/hadoop?rev=743035&r1=743034&r2=743035&view=diff
==============================================================================
--- hadoop/core/trunk/bin/hadoop (original)
+++ hadoop/core/trunk/bin/hadoop Tue Feb 10 18:05:08 2009
@@ -98,6 +98,10 @@
       CLASS=$COMMAND
     fi
     shift
+    
+    if $cygwin; then
+      CLASSPATH=`cygpath -p -w "$CLASSPATH"`
+    fi
     exec "$JAVA" $JAVA_HEAP_MAX $HADOOP_OPTS -classpath "$CLASSPATH" $CLASS "$@"
     ;;
 

Modified: hadoop/core/trunk/bin/hadoop-config.sh
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/bin/hadoop-config.sh?rev=743035&r1=743034&r2=743035&view=diff
==============================================================================
--- hadoop/core/trunk/bin/hadoop-config.sh (original)
+++ hadoop/core/trunk/bin/hadoop-config.sh Tue Feb 10 18:05:08 2009
@@ -39,7 +39,7 @@
 # the root of the Hadoop installation
 #TODO: change the env variable when dir structure is changed
 export HADOOP_HOME=`dirname "$this"`/..
-export HADOOP_CORE_HOME="${HADOOP_CORE_HOME:-$HADOOP_HOME}"
+export HADOOP_CORE_HOME="${HADOOP_HOME}"
 #export HADOOP_HOME=`dirname "$this"`/../..
 #export HADOOP_CORE_HOME="${HADOOP_CORE_HOME:-`dirname "$this"`/..}"
 
@@ -56,7 +56,7 @@
 fi
  
 # Allow alternate conf dir location.
-HADOOP_CONF_DIR="${HADOOP_CONF_DIR:-$HADOOP_HOME/conf}"
+export HADOOP_CONF_DIR="${HADOOP_CONF_DIR:-$HADOOP_HOME/conf}"
 
 #check to see it is specified whether to use the slaves or the
 # masters file
@@ -175,7 +175,6 @@
 
 # cygwin path translation
 if $cygwin; then
-  CLASSPATH=`cygpath -p -w "$CLASSPATH"`
   HADOOP_CORE_HOME=`cygpath -w "$HADOOP_CORE_HOME"`
   HADOOP_LOG_DIR=`cygpath -w "$HADOOP_LOG_DIR"`
   TOOL_PATH=`cygpath -p -w "$TOOL_PATH"`
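
Removing the CLASSPATH line here, while the launcher hunks add it back, fixes an
ordering bug: after HADOOP-4868 split the scripts, bin/hdfs and bin/mapred append
more Unix-style jar paths to CLASSPATH after this file has run, so converting here
produced a half-translated list. A hypothetical trace of the old failure mode:

    CLASSPATH=`cygpath -p -w "$CLASSPATH"`   # converted too early, in hadoop-config.sh
    CLASSPATH=${CLASSPATH}:$f                # bin/hdfs later appends a Unix-style jar
    # the result mixes ';' with ':' and Windows with Unix paths, and java rejects it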

Modified: hadoop/core/trunk/bin/hdfs
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/bin/hdfs?rev=743035&r1=743034&r2=743035&view=diff
==============================================================================
--- hadoop/core/trunk/bin/hdfs (original)
+++ hadoop/core/trunk/bin/hdfs Tue Feb 10 18:05:08 2009
@@ -96,4 +96,8 @@
   CLASSPATH=${CLASSPATH}:$f;
 done
 
+if $cygwin; then
+  CLASSPATH=`cygpath -p -w "$CLASSPATH"`
+fi
+
 exec "$JAVA" $JAVA_HEAP_MAX $HADOOP_OPTS -classpath "$CLASSPATH" $CLASS "$@"

Modified: hadoop/core/trunk/bin/mapred
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/bin/mapred?rev=743035&r1=743034&r2=743035&view=diff
==============================================================================
--- hadoop/core/trunk/bin/mapred (original)
+++ hadoop/core/trunk/bin/mapred Tue Feb 10 18:05:08 2009
@@ -93,4 +93,8 @@
   CLASSPATH=${CLASSPATH}:$f;
 done
 
+if $cygwin; then
+  CLASSPATH=`cygpath -p -w "$CLASSPATH"`
+fi
+
 exec "$JAVA" $JAVA_HEAP_MAX $HADOOP_OPTS -classpath "$CLASSPATH" $CLASS "$@"

Modified: hadoop/core/trunk/src/core/org/apache/hadoop/fs/s3/S3InputStream.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/core/org/apache/hadoop/fs/s3/S3InputStream.java?rev=743035&r1=743034&r2=743035&view=diff
==============================================================================
--- hadoop/core/trunk/src/core/org/apache/hadoop/fs/s3/S3InputStream.java (original)
+++ hadoop/core/trunk/src/core/org/apache/hadoop/fs/s3/S3InputStream.java Tue Feb 10 18:05:08 2009
@@ -168,7 +168,7 @@
   @Override
   public void close() throws IOException {
     if (closed) {
-      throw new IOException("Stream closed");
+      return;
     }
     if (blockStream != null) {
       blockStream.close();
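
The close() change in this hunk repeats in S3OutputStream, NativeS3FileSystem, and
DFSClient below: java.io.Closeable specifies that closing an already-closed stream
has no effect, so the guard now returns instead of throwing. A minimal self-contained
sketch of the pattern (the class and resource are illustrative, not Hadoop's):

    import java.io.Closeable;
    import java.io.IOException;

    class GuardedStream implements Closeable {
      private boolean closed = false;

      @Override
      public synchronized void close() throws IOException {
        if (closed) {
          return;        // second and later calls are no-ops, per Closeable
        }
        closed = true;   // mark first so a failure below cannot re-trigger cleanup
        // ... release the underlying resource here ...
      }
    }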

Modified: hadoop/core/trunk/src/core/org/apache/hadoop/fs/s3/S3OutputStream.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/core/org/apache/hadoop/fs/s3/S3OutputStream.java?rev=743035&r1=743034&r2=743035&view=diff
==============================================================================
--- hadoop/core/trunk/src/core/org/apache/hadoop/fs/s3/S3OutputStream.java (original)
+++ hadoop/core/trunk/src/core/org/apache/hadoop/fs/s3/S3OutputStream.java Tue Feb 10 18:05:08 2009
@@ -200,7 +200,7 @@
   @Override
   public synchronized void close() throws IOException {
     if (closed) {
-      throw new IOException("Stream closed");
+      return;
     }
 
     flush();

Modified: hadoop/core/trunk/src/core/org/apache/hadoop/fs/s3native/NativeS3FileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/core/org/apache/hadoop/fs/s3native/NativeS3FileSystem.java?rev=743035&r1=743034&r2=743035&view=diff
==============================================================================
--- hadoop/core/trunk/src/core/org/apache/hadoop/fs/s3native/NativeS3FileSystem.java (original)
+++ hadoop/core/trunk/src/core/org/apache/hadoop/fs/s3native/NativeS3FileSystem.java Tue Feb 10 18:05:08 2009
@@ -85,6 +85,7 @@
       this.key = key;
     }
     
+    @Override
     public synchronized int read() throws IOException {
       int result = in.read();
       if (result != -1) {
@@ -92,6 +93,7 @@
       }
       return result;
     }
+    @Override
     public synchronized int read(byte[] b, int off, int len)
       throws IOException {
       
@@ -102,18 +104,22 @@
       return result;
     }
 
+    @Override
     public void close() throws IOException {
       in.close();
     }
 
+    @Override
     public synchronized void seek(long pos) throws IOException {
       in.close();
       in = store.retrieve(key, pos);
       this.pos = pos;
     }
+    @Override
     public synchronized long getPos() throws IOException {
       return pos;
     }
+    @Override
     public boolean seekToNewSource(long targetPos) throws IOException {
       return false;
     }
@@ -164,7 +170,7 @@
     @Override
     public synchronized void close() throws IOException {
       if (closed) {
-        throw new IOException("Stream closed");
+        return;
       }
 
       backupStream.close();
@@ -260,6 +266,7 @@
   }
 
   /** This optional operation is not yet supported. */
+  @Override
   public FSDataOutputStream append(Path f, int bufferSize,
       Progressable progress) throws IOException {
     throw new IOException("Not supported");

Modified: hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/DFSClient.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/DFSClient.java?rev=743035&r1=743034&r2=743035&view=diff
==============================================================================
--- hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/DFSClient.java (original)
+++ hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/DFSClient.java Tue Feb 10 18:05:08 2009
@@ -17,39 +17,85 @@
  */
 package org.apache.hadoop.hdfs;
 
-import org.apache.hadoop.io.*;
-import org.apache.hadoop.io.retry.RetryPolicies;
-import org.apache.hadoop.io.retry.RetryPolicy;
-import org.apache.hadoop.io.retry.RetryProxy;
-import org.apache.hadoop.fs.*;
+import java.io.BufferedInputStream;
+import java.io.BufferedOutputStream;
+import java.io.DataInputStream;
+import java.io.DataOutputStream;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.net.InetSocketAddress;
+import java.net.Socket;
+import java.nio.BufferOverflowException;
+import java.nio.ByteBuffer;
+import java.util.AbstractMap;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.Random;
+import java.util.SortedMap;
+import java.util.TreeMap;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.TimeUnit;
+import java.util.zip.CRC32;
+
+import javax.net.SocketFactory;
+import javax.security.auth.login.LoginException;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.BlockLocation;
+import org.apache.hadoop.fs.ChecksumException;
+import org.apache.hadoop.fs.ContentSummary;
+import org.apache.hadoop.fs.FSDataInputStream;
+import org.apache.hadoop.fs.FSInputChecker;
+import org.apache.hadoop.fs.FSInputStream;
+import org.apache.hadoop.fs.FSOutputSummer;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.MD5MD5CRC32FileChecksum;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.Syncable;
 import org.apache.hadoop.fs.permission.FsPermission;
-import org.apache.hadoop.ipc.*;
-import org.apache.hadoop.net.NetUtils;
-import org.apache.hadoop.net.NodeBase;
-import org.apache.hadoop.conf.*;
-import org.apache.hadoop.hdfs.protocol.*;
+import org.apache.hadoop.hdfs.protocol.AlreadyBeingCreatedException;
+import org.apache.hadoop.hdfs.protocol.Block;
+import org.apache.hadoop.hdfs.protocol.ClientDatanodeProtocol;
+import org.apache.hadoop.hdfs.protocol.ClientProtocol;
+import org.apache.hadoop.hdfs.protocol.DataTransferProtocol;
+import org.apache.hadoop.hdfs.protocol.DatanodeID;
+import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
+import org.apache.hadoop.hdfs.protocol.FSConstants;
+import org.apache.hadoop.hdfs.protocol.LocatedBlock;
+import org.apache.hadoop.hdfs.protocol.LocatedBlocks;
+import org.apache.hadoop.hdfs.protocol.QuotaExceededException;
 import org.apache.hadoop.hdfs.server.common.HdfsConstants;
 import org.apache.hadoop.hdfs.server.common.UpgradeStatusReport;
 import org.apache.hadoop.hdfs.server.datanode.DataNode;
 import org.apache.hadoop.hdfs.server.namenode.NameNode;
 import org.apache.hadoop.hdfs.server.namenode.NotReplicatedYetException;
+import org.apache.hadoop.io.DataOutputBuffer;
+import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.io.MD5Hash;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.io.retry.RetryPolicies;
+import org.apache.hadoop.io.retry.RetryPolicy;
+import org.apache.hadoop.io.retry.RetryProxy;
+import org.apache.hadoop.ipc.RPC;
+import org.apache.hadoop.ipc.RemoteException;
+import org.apache.hadoop.net.NetUtils;
+import org.apache.hadoop.net.NodeBase;
 import org.apache.hadoop.security.AccessControlException;
 import org.apache.hadoop.security.UnixUserGroupInformation;
-import org.apache.hadoop.util.*;
-
-import org.apache.commons.logging.*;
-
-import java.io.*;
-import java.net.*;
-import java.util.*;
-import java.util.zip.CRC32;
-import java.util.concurrent.TimeUnit;
-import java.util.concurrent.ConcurrentHashMap;
-import java.nio.BufferOverflowException;
-import java.nio.ByteBuffer;
-
-import javax.net.SocketFactory;
-import javax.security.auth.login.LoginException;
+import org.apache.hadoop.util.Daemon;
+import org.apache.hadoop.util.DataChecksum;
+import org.apache.hadoop.util.Progressable;
+import org.apache.hadoop.util.StringUtils;
 
 /********************************************************
  * DFSClient can connect to a Hadoop Filesystem and 
@@ -951,6 +997,7 @@
     }
 
     /** {@inheritDoc} */
+    @Override
     public String toString() {
       String s = getClass().getSimpleName();
       if (LOG.isTraceEnabled()) {
@@ -1528,7 +1575,7 @@
     public synchronized void close() throws IOException {
       checkOpen();
       if (closed) {
-        throw new IOException("Stream closed");
+        return;
       }
 
       if ( blockReader != null ) {
@@ -2096,6 +2143,7 @@
 
       private volatile boolean closed = false;
   
+      @Override
       public void run() {
 
         while (!closed && clientRunning) {
@@ -2264,6 +2312,7 @@
         this.targets = targets;
       }
 
+      @Override
       public void run() {
 
         this.setName("ResponseProcessor for block " + block);
@@ -2483,12 +2532,8 @@
     }
 
     private void isClosed() throws IOException {
-      if (closed) {
-        if (lastException != null) {
+      if (closed && lastException != null) {
           throw lastException;
-        } else {
-          throw new IOException("Stream closed.");
-        }
       }
     }
 
@@ -3010,6 +3055,8 @@
      */
     @Override
     public void close() throws IOException {
+      if (closed)
+        return;
       closeInternal();
       leasechecker.remove(src);
       
@@ -3143,6 +3190,7 @@
   }
 
   /** {@inheritDoc} */
+  @Override
   public String toString() {
     return getClass().getSimpleName() + "[clientName=" + clientName
         + ", ugi=" + ugi + "]"; 

Modified: hadoop/core/trunk/src/test/org/apache/hadoop/fs/FileSystemContractBaseTest.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/test/org/apache/hadoop/fs/FileSystemContractBaseTest.java?rev=743035&r1=743034&r2=743035&view=diff
==============================================================================
--- hadoop/core/trunk/src/test/org/apache/hadoop/fs/FileSystemContractBaseTest.java (original)
+++ hadoop/core/trunk/src/test/org/apache/hadoop/fs/FileSystemContractBaseTest.java Tue Feb 10 18:05:08 2009
@@ -23,12 +23,6 @@
 
 import junit.framework.TestCase;
 
-import org.apache.hadoop.fs.FSDataInputStream;
-import org.apache.hadoop.fs.FSDataOutputStream;
-import org.apache.hadoop.fs.FileStatus;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-
 /**
  * <p>
  * A collection of tests for the contract of the {@link FileSystem}.
@@ -432,6 +426,26 @@
         fs.exists(path("/test/new/newdir/dir/subdir/file2")));
   }
 
+  public void testInputStreamClosedTwice() throws IOException {
+    // HADOOP-4760: according to Closeable#close(), closing an already-closed
+    // stream should have no effect.
+    Path src = path("/test/hadoop/file");
+    createFile(src);
+    FSDataInputStream in = fs.open(src);
+    in.close();
+    in.close();
+  }
+  
+  public void testOutputStreamClosedTwice() throws IOException {
+    // HADOOP-4760: according to Closeable#close(), closing an already-closed
+    // stream should have no effect.
+    Path src = path("/test/hadoop/file");
+    FSDataOutputStream out = fs.create(src);
+    out.writeChar('H'); //write some data
+    out.close();
+    out.close();
+  }
+  
   protected Path path(String pathString) {
     return new Path(pathString).makeQualified(fs);
   }
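
Since FileSystemContractBaseTest is a JUnit 3 base class with a protected fs field,
any concrete filesystem test that extends it picks up the two new cases above
automatically; it only needs to assign fs before the tests run. A hypothetical
subclass, here run against the local filesystem:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.FileSystemContractBaseTest;

    public class LocalFSContractTest extends FileSystemContractBaseTest {
      @Override
      protected void setUp() throws Exception {
        fs = FileSystem.getLocal(new Configuration()); // filesystem under test
        super.setUp();
      }
    }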