Posted to common-commits@hadoop.apache.org by om...@apache.org on 2011/03/04 04:31:35 UTC

svn commit: r1077028 - in /hadoop/common/branches/branch-0.20-security-patches/src: core/org/apache/hadoop/fs/HarFileSystem.java test/org/apache/hadoop/fs/TestHarFileSystem.java

Author: omalley
Date: Fri Mar  4 03:31:35 2011
New Revision: 1077028

URL: http://svn.apache.org/viewvc?rev=1077028&view=rev
Log:
commit 2fbe620852d47a27f28efa17de1fd9378bd9170b
Author: Mahadev Konar <ma...@cdev6022.inktomisearch.com>
Date:   Tue Oct 20 23:58:31 2009 +0000

    HADOOP-6097 from https://issues.apache.org/jira/secure/attachment/12422737/HADOOP-6097-0.20.patch
    
    +++ b/YAHOO-CHANGES.txt
    +    HADOOP-6097. Multiple bugs w/ Hadoop archives (mahadev)
    +

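This patch bundles two HarFileSystem fixes plus matching test coverage. The
first, in the opening hunk of HarFileSystem.java below, drops a hand-rolled
URI round-trip: the removed code took the decoded path from tmpURI.getPath(),
rebuilt a URI from its pieces, and then re-parsed that URI's string form as a
Path, a round-trip that can double-escape or otherwise mangle path components
containing characters such as '%' (the new test file "d%d" exercises exactly
this case). The replacement builds the har path in one step with Path's
three-argument constructor. A minimal sketch of the new approach, reusing the
names from the diff (uri, harAuth) inside a hypothetical helper class, could
look like this:

    import java.net.URI;
    import org.apache.hadoop.fs.Path;

    public class HarPathSketch {
      // Illustrative only, not the full HarFileSystem method: 'uri' is the
      // har:// URI of the file system, 'harAuth' its authority, 'fsPath' a
      // path on the underlying file system.
      static Path toHarPath(URI uri, String harAuth, Path fsPath) {
        URI tmpURI = fsPath.toUri();
        // Build the har path directly; the (scheme, authority, path)
        // constructor avoids the encode/decode round-trip of the removed code.
        return new Path(uri.getScheme(), harAuth, tmpURI.getPath());
      }
    }
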
Modified:
    hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/fs/HarFileSystem.java
    hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/fs/TestHarFileSystem.java

Modified: hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/fs/HarFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/fs/HarFileSystem.java?rev=1077028&r1=1077027&r2=1077028&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/fs/HarFileSystem.java (original)
+++ hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/fs/HarFileSystem.java Fri Mar  4 03:31:35 2011
@@ -301,19 +301,8 @@ public class HarFileSystem extends Filte
     }
 
     URI tmpURI = fsPath.toUri();
-    fsPath = new Path(tmpURI.getPath());
     //change this to Har uri 
-    URI tmp = null;
-    try {
-      tmp = new URI(uri.getScheme(), harAuth, fsPath.toString(),
-                    tmpURI.getQuery(), tmpURI.getFragment());
-    } catch(URISyntaxException ue) {
-      LOG.error("Error in URI ", ue);
-    }
-    if (tmp != null) {
-      return new Path(tmp.toString());
-    }
-    return null;
+    return new Path(uri.getScheme(), harAuth, tmpURI.getPath());
   }
   
   /**
@@ -425,12 +414,13 @@ public class HarFileSystem extends Filte
       // do nothing just a read.
     }
     FSDataInputStream aIn = fs.open(archiveIndex);
-    LineReader aLin = new LineReader(aIn, getConf());
+    LineReader aLin;
     String retStr = null;
     // now start reading the real index file
-     read = 0;
     for (Store s: stores) {
+      read = 0;
       aIn.seek(s.begin);
+      aLin = new LineReader(aIn, getConf());
       while (read + s.begin < s.end) {
         int tmp = aLin.readLine(line);
         read += tmp;

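The second hunk above reworks how the archive index is scanned. Previously a
single LineReader was constructed before the per-store loop and the read
counter was zeroed only once, so after seeking aIn to the next store's offset
the reader could still serve bytes from its internal buffer and the loop bound
(read + s.begin < s.end) no longer matched the current store. The patch resets
read for every store and creates a fresh LineReader after each seek. A minimal
sketch of that loop, with an assumed Store shape holding begin/end offsets and
the index-line parsing elided, could look like this:

    import java.io.IOException;
    import java.util.List;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FSDataInputStream;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.util.LineReader;

    public class HarIndexScanSketch {
      /** Byte range of one store inside the archive index (assumed shape). */
      static class Store {
        final long begin, end;
        Store(long begin, long end) { this.begin = begin; this.end = end; }
      }

      static void scanIndex(FileSystem fs, Path archiveIndex,
                            List<Store> stores, Configuration conf)
          throws IOException {
        FSDataInputStream aIn = fs.open(archiveIndex);
        try {
          Text line = new Text();
          for (Store s : stores) {
            long read = 0;                    // counter restarts for every store
            aIn.seek(s.begin);                // position stream at the store start
            LineReader aLin = new LineReader(aIn, conf); // fresh reader, no stale buffer
            while (read + s.begin < s.end) {
              read += aLin.readLine(line);    // consume one index line and count it
              // ... a real implementation would parse 'line' into an entry here ...
            }
          }
        } finally {
          aIn.close();
        }
      }
    }
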
Modified: hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/fs/TestHarFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/fs/TestHarFileSystem.java?rev=1077028&r1=1077027&r2=1077028&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/fs/TestHarFileSystem.java (original)
+++ hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/fs/TestHarFileSystem.java Fri Mar  4 03:31:35 2011
@@ -57,7 +57,7 @@ public class TestHarFileSystem extends T
   private MiniDFSCluster dfscluster;
   private MiniMRCluster mapred;
   private FileSystem fs;
-  private Path filea, fileb, filec;
+  private Path filea, fileb, filec, filed;
   private Path archivePath;
   
   protected void setUp() throws Exception {
@@ -69,6 +69,9 @@ public class TestHarFileSystem extends T
     filea = new Path(inputPath,"a");
     fileb = new Path(inputPath,"b");
     filec = new Path(inputPath,"c");
+    // check for a har containing escape-worthy characters
+    // in their names
+    filed = new Path(inputPath, "d%d");
     archivePath = new Path(fs.getHomeDirectory(), "tmp");
   }
   
@@ -121,7 +124,14 @@ public class TestHarFileSystem extends T
     out = fs.create(filec);
     out.write("c".getBytes());
     out.close();
+    out = fs.create(filed);
+    out.write("d".getBytes());
+    out.close();
     Configuration conf = mapred.createJobConf();
+    
+    // check to see if fs.har.impl.disable.cache is true
+    boolean archivecaching = conf.getBoolean("fs.har.impl.disable.cache", false);
+    assertTrue(archivecaching);
     HadoopArchives har = new HadoopArchives(conf);
     String[] args = new String[3];
     //check for destination not specified
@@ -179,6 +189,7 @@ public class TestHarFileSystem extends T
     Path harFilea = new Path(harPath, "a");
     Path harFileb = new Path(harPath, "b");
     Path harFilec = new Path(harPath, "c");
+    Path harFiled = new Path(harPath, "d%d");
     FileSystem harFs = harFilea.getFileSystem(conf);
     FSDataInputStream fin = harFs.open(harFilea);
     byte[] b = new byte[4];
@@ -193,6 +204,11 @@ public class TestHarFileSystem extends T
     fin.read(b);
     fin.close();
     assertTrue("strings are equal ", (b[0] == "c".getBytes()[0]));
+    fin = harFs.open(harFiled);
+    fin.read(b);
+    fin.close();
+    assertTrue("strings are equal ", (b[0] == "d".getBytes()[0]));
+    
     // ok all files match 
     // run a map reduce job
     Path outdir = new Path(fs.getHomeDirectory(), "mapout"); 
@@ -213,11 +229,11 @@ public class TestHarFileSystem extends T
     FileStatus[] status = fs.globStatus(new Path(outdir, "part*"));
     Path reduceFile = status[0].getPath();
     FSDataInputStream reduceIn = fs.open(reduceFile);
-    b = new byte[6];
+    b = new byte[8];
     reduceIn.read(b);
-    //assuming all the 6 bytes were read.
+    //assuming all the 8 bytes were read.
     Text readTxt = new Text(b);
-    assertTrue("a\nb\nc\n".equals(readTxt.toString()));
+    assertTrue("a\nb\nc\nd\n".equals(readTxt.toString()));
     assertTrue("number of bytes left should be -1", reduceIn.read(b) == -1);
     reduceIn.close();
   }
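
On the test side, the patch adds a fourth input file whose name, d%d, contains
an escape-worthy character, asserts that fs.har.impl.disable.cache is true in
the job configuration (so har file system instances are not reused from the
FileSystem cache), and widens the reduce-output check from 6 bytes ("a\nb\nc\n")
to 8 bytes ("a\nb\nc\nd\n"). Reading such a file back through the har file
system could look roughly like the sketch below; the archive location is made
up for illustration, whereas the real test derives its paths from the
MiniDFSCluster home directory.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FSDataInputStream;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class HarReadSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Hypothetical archive path for illustration; the real test builds its
        // har:// paths from the test cluster's file system URI.
        Path harPath = new Path("har:///user/hadoop/tmp/foo.har/input");
        Path harFiled = new Path(harPath, "d%d");  // name containing '%'
        FileSystem harFs = harFiled.getFileSystem(conf);
        FSDataInputStream in = harFs.open(harFiled);
        try {
          byte[] b = new byte[1];
          in.read(b);                        // the test writes the single byte 'd'
          System.out.println((char) b[0]);   // expected to print 'd'
        } finally {
          in.close();
        }
      }
    }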