Posted to common-commits@hadoop.apache.org by cu...@apache.org on 2006/10/31 20:42:42 UTC

svn commit: r469622 - in /lucene/hadoop/trunk: CHANGES.txt src/java/org/apache/hadoop/io/ArrayFile.java src/java/org/apache/hadoop/io/MapFile.java src/java/org/apache/hadoop/mapred/MapFileOutputFormat.java

Author: cutting
Date: Tue Oct 31 11:42:41 2006
New Revision: 469622

URL: http://svn.apache.org/viewvc?view=rev&rev=469622
Log:
HADOOP-636.  Add MapFile and ArrayFile constructors which accept a Progressable.
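For context, a minimal usage sketch of the new MapFile.Writer constructor. The file name, key/value classes, and the inline Progressable below are illustrative only; inside a reduce task the framework-supplied Progressable would normally be passed instead of constructing one by hand:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.io.IntWritable;
    import org.apache.hadoop.io.MapFile;
    import org.apache.hadoop.io.SequenceFile.CompressionType;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.util.Progressable;

    public class MapFileProgressExample {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(conf);

        // Stand-in Progressable; a real task would pass the reporter it is given.
        Progressable progress = new Progressable() {
          public void progress() { /* keep-alive hook invoked during long writes */ }
        };

        MapFile.Writer writer =
          new MapFile.Writer(conf, fs, "example.map",
                             IntWritable.class, Text.class,
                             CompressionType.BLOCK, progress);
        try {
          // Keys must be appended in sorted order, as with any MapFile.
          writer.append(new IntWritable(1), new Text("one"));
          writer.append(new IntWritable(2), new Text("two"));
        } finally {
          writer.close();
        }
      }
    }

The same Progressable is forwarded to the SequenceFile writers for both the data and index files (see the MapFile.java hunk below), so the task keeps reporting progress while blocks are written to the filesystem.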

Modified:
    lucene/hadoop/trunk/CHANGES.txt
    lucene/hadoop/trunk/src/java/org/apache/hadoop/io/ArrayFile.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/io/MapFile.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MapFileOutputFormat.java

Modified: lucene/hadoop/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/CHANGES.txt?view=diff&rev=469622&r1=469621&r2=469622
==============================================================================
--- lucene/hadoop/trunk/CHANGES.txt (original)
+++ lucene/hadoop/trunk/CHANGES.txt Tue Oct 31 11:42:41 2006
@@ -82,6 +82,11 @@
 
 22. HADOOP-658.  Update source file headers per Apache policy.  (cutting)
 
+23. HADOOP-636.  Add MapFile & ArrayFile constructors which accept a
+    Progressable, and pass it down to SequenceFile.  This permits
+    reduce tasks which use MapFile to still report progress while
+    writing blocks to the filesystem.  (cutting)
+
 
 Release 0.7.2 - 2006-10-18
 

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/io/ArrayFile.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/io/ArrayFile.java?view=diff&rev=469622&r1=469621&r2=469622
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/io/ArrayFile.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/io/ArrayFile.java Tue Oct 31 11:42:41 2006
@@ -21,6 +21,9 @@
 import java.io.*;
 import org.apache.hadoop.fs.*;
 import org.apache.hadoop.conf.*;
+import org.apache.hadoop.util.*;
+import org.apache.hadoop.io.SequenceFile.CompressionType;
+
 
 /** A dense file-based mapping from integers to values. */
 public class ArrayFile extends MapFile {
@@ -31,9 +34,20 @@
   public static class Writer extends MapFile.Writer {
     private LongWritable count = new LongWritable(0);
 
-    /** Create the named file for values of the named class. */
-    public Writer(FileSystem fs, String file, Class valClass) throws IOException {
+    /** Create the named file for values of the named class.
+     * @deprecated specify {@link CompressionType} and {@link Progressable}
+     */
+    public Writer(FileSystem fs, String file, Class valClass)
+      throws IOException {
       super(fs, file, LongWritable.class, valClass);
+    }
+
+    /** Create the named file for values of the named class. */
+    public Writer(Configuration conf, FileSystem fs,
+                  String file, Class valClass,
+                  CompressionType compress, Progressable progress)
+      throws IOException {
+      super(conf, fs, file, LongWritable.class, valClass, compress, progress);
     }
 
     /** Append a value to the file. */

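Similarly, a sketch of the new ArrayFile.Writer constructor; the file name and value class are illustrative:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.io.ArrayFile;
    import org.apache.hadoop.io.SequenceFile.CompressionType;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.util.Progressable;

    public class ArrayFileProgressExample {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(conf);

        // Values are stored under an implicit LongWritable index key.
        ArrayFile.Writer writer =
          new ArrayFile.Writer(conf, fs, "example.array", Text.class,
                               CompressionType.BLOCK,
                               new Progressable() {
                                 public void progress() { /* called during long writes */ }
                               });
        try {
          writer.append(new Text("first"));
          writer.append(new Text("second"));
        } finally {
          writer.close();
        }
      }
    }
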
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/io/MapFile.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/io/MapFile.java?view=diff&rev=469622&r1=469621&r2=469622
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/io/MapFile.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/io/MapFile.java Tue Oct 31 11:42:41 2006
@@ -21,6 +21,7 @@
 import java.io.*;
 import org.apache.hadoop.fs.*;
 import org.apache.hadoop.conf.*;
+import org.apache.hadoop.util.Progressable;
 import org.apache.hadoop.io.SequenceFile.CompressionType;
 
 /** A file-based map from keys to values.
@@ -73,7 +74,7 @@
     }
 
     /** Create the named map for keys of the named class.
-     * @deprecated specify a {@link CompressionType} instead
+     * @deprecated specify {@link CompressionType} and {@link Progressable}
      */
     public Writer(FileSystem fs, String dirName,
                   Class keyClass, Class valClass, boolean compress)
@@ -82,19 +83,30 @@
     }
     /** Create the named map for keys of the named class. */
     public Writer(Configuration conf, FileSystem fs, String dirName,
+                  Class keyClass, Class valClass,
+                  CompressionType compress, Progressable progress)
+      throws IOException {
+      this(conf,fs,dirName,WritableComparator.get(keyClass),valClass,
+           compress, progress);
+    }
+
+    /** Create the named map for keys of the named class. */
+    public Writer(Configuration conf, FileSystem fs, String dirName,
                   Class keyClass, Class valClass, CompressionType compress)
       throws IOException {
       this(conf,fs,dirName,WritableComparator.get(keyClass),valClass,compress);
     }
 
-    /** Create the named map using the named key comparator. */
+    /** Create the named map using the named key comparator.
+     * @deprecated specify {@link CompressionType} and {@link Progressable}
+     */
     public Writer(FileSystem fs, String dirName,
                   WritableComparator comparator, Class valClass)
       throws IOException {
       this(fs, dirName, comparator, valClass, false);
     }
     /** Create the named map using the named key comparator.
-     * @deprecated specify a {@link CompressionType} instead
+     * @deprecated specify {@link CompressionType} and {@link Progressable}
      */
     public Writer(FileSystem fs, String dirName,
                   WritableComparator comparator, Class valClass,
@@ -105,11 +117,21 @@
            compress ? CompressionType.RECORD : CompressionType.NONE);
     }
 
-    /** Create the named map using the named key comparator. */
+    /** Create the named map using the named key comparator.
+     * @deprecated specify a {@link Progressable}
+     */
     public Writer(Configuration conf, FileSystem fs, String dirName,
                   WritableComparator comparator, Class valClass,
                   SequenceFile.CompressionType compress)
       throws IOException {
+      this(conf, fs, dirName, comparator, valClass, compress, null);
+    }
+    /** Create the named map using the named key comparator. */
+    public Writer(Configuration conf, FileSystem fs, String dirName,
+                  WritableComparator comparator, Class valClass,
+                  SequenceFile.CompressionType compress,
+                  Progressable progress)
+      throws IOException {
 
       this.comparator = comparator;
       this.lastKey = comparator.newKey();
@@ -123,10 +145,11 @@
       Class keyClass = comparator.getKeyClass();
       this.data =
         SequenceFile.createWriter
-        (fs,conf,dataFile,keyClass,valClass,compress);
+        (fs,conf,dataFile,keyClass,valClass,compress,progress);
       this.index =
         SequenceFile.createWriter
-        (fs,conf,indexFile,keyClass,LongWritable.class,CompressionType.BLOCK);
+        (fs, conf, indexFile, keyClass, LongWritable.class,
+         CompressionType.BLOCK, progress);
     }
     
     /** The number of entries that are added before an index entry is added.*/

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MapFileOutputFormat.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MapFileOutputFormat.java?view=diff&rev=469622&r1=469621&r2=469622
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MapFileOutputFormat.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MapFileOutputFormat.java Tue Oct 31 11:42:41 2006
@@ -46,7 +46,8 @@
       new MapFile.Writer(job, fs, file.toString(),
                          job.getMapOutputKeyClass(),
                          job.getMapOutputValueClass(),
-                         SequenceFile.getCompressionType(job));
+                         SequenceFile.getCompressionType(job),
+                         progress);
 
     return new RecordWriter() {
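
For completeness, a sketch of a job configuration that exercises MapFileOutputFormat; when the framework calls getRecordWriter it passes its own Progressable, which now reaches the underlying SequenceFile writers through the constructor added above. Paths and key/value classes here are illustrative:

    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.IntWritable;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.mapred.JobClient;
    import org.apache.hadoop.mapred.JobConf;
    import org.apache.hadoop.mapred.MapFileOutputFormat;

    public class MapFileJobExample {
      public static void main(String[] args) throws Exception {
        JobConf job = new JobConf(MapFileJobExample.class);
        job.setJobName("mapfile-output-example");

        job.setOutputKeyClass(IntWritable.class);
        job.setOutputValueClass(Text.class);
        job.setOutputFormat(MapFileOutputFormat.class);

        // Hypothetical input/output paths.
        job.setInputPath(new Path("in"));
        job.setOutputPath(new Path("out"));

        JobClient.runJob(job);
      }
    }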