You are viewing a plain text version of this content. The canonical link for it is here.
Posted to common-commits@hadoop.apache.org by cu...@apache.org on 2006/02/03 21:34:55 UTC

svn commit: r374738 [2/3] - in /lucene/hadoop/trunk: bin/ lib/ src/java/org/apache/hadoop/conf/ src/java/org/apache/hadoop/dfs/ src/java/org/apache/hadoop/fs/ src/java/org/apache/hadoop/io/ src/java/org/apache/hadoop/ipc/ src/java/org/apache/hadoop/map...

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/io/ObjectWritable.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/io/ObjectWritable.java?rev=374738&r1=374737&r2=374738&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/io/ObjectWritable.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/io/ObjectWritable.java Fri Feb  3 12:34:32 2006
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.io.
+package org.apache.hadoop.io;
 
 import java.lang.reflect.Proxy;
 import java.lang.reflect.Method;

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/io/SequenceFile.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/io/SequenceFile.java?rev=374738&r1=374737&r2=374738&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/io/SequenceFile.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/io/SequenceFile.java Fri Feb  3 12:34:32 2006
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.io.
+package org.apache.hadoop.io;
 
 import java.io.*;
 import java.util.*;
@@ -49,9 +49,9 @@
 
   /** Write key/value pairs to a sequence-format file. */
   public static class Writer {
-    private NFSDataOutputStream out;
+    private FSDataOutputStream out;
     private DataOutputBuffer buffer = new DataOutputBuffer();
-    private NutchFileSystem nfs = null;
+    private FileSystem fs = null;
     private File target = null;
 
     private Class keyClass;
@@ -78,32 +78,32 @@
     }
 
     /** Create the named file. */
-    public Writer(NutchFileSystem nfs, String name,
+    public Writer(FileSystem fs, String name,
                   Class keyClass, Class valClass)
       throws IOException {
-      this(nfs, name, keyClass, valClass, false);
+      this(fs, name, keyClass, valClass, false);
     }
     
     /** Create the named file.
      * @param compress if true, values are compressed.
      */
-    public Writer(NutchFileSystem nfs, String name,
+    public Writer(FileSystem fs, String name,
                   Class keyClass, Class valClass, boolean compress)
       throws IOException {
-      this.nfs = nfs;
+      this.fs = fs;
       this.target = new File(name);
-      init(nfs.create(target), keyClass, valClass, compress);
+      init(fs.create(target), keyClass, valClass, compress);
     }
     
     /** Write to an arbitrary stream using a specified buffer size. */
-    private Writer(NFSDataOutputStream out,
+    private Writer(FSDataOutputStream out,
                    Class keyClass, Class valClass, boolean compress)
       throws IOException {
       init(out, keyClass, valClass, compress);
     }
     
     /** Write and flush the file header. */
-    private void init(NFSDataOutputStream out,
+    private void init(FSDataOutputStream out,
                       Class keyClass, Class valClass,
                       boolean compress) throws IOException {
       this.out = out;
@@ -202,10 +202,10 @@
   /** Writes key/value pairs from a sequence-format file. */
   public static class Reader {
     private String file;
-    private NFSDataInputStream in;
+    private FSDataInputStream in;
     private DataOutputBuffer outBuf = new DataOutputBuffer();
     private DataInputBuffer inBuf = new DataInputBuffer();
-    private NutchFileSystem nfs = null;
+    private FileSystem fs = null;
 
     private byte[] version = new byte[VERSION.length];
 
@@ -226,25 +226,25 @@
     private Configuration conf;
 
     /** Open the named file. */
-    public Reader(NutchFileSystem nfs, String file, Configuration conf) throws IOException {
-      this(nfs, file, conf.getInt("io.file.buffer.size", 4096));
+    public Reader(FileSystem fs, String file, Configuration conf) throws IOException {
+      this(fs, file, conf.getInt("io.file.buffer.size", 4096));
       this.conf = conf;
     }
 
-    private Reader(NutchFileSystem nfs, String name, int bufferSize) throws IOException {
-      this.nfs = nfs;
+    private Reader(FileSystem fs, String name, int bufferSize) throws IOException {
+      this.fs = fs;
       this.file = name;
       File file = new File(name);
-      this.in = nfs.open(file, bufferSize);
-      this.end = nfs.getLength(file);
+      this.in = fs.open(file, bufferSize);
+      this.end = fs.getLength(file);
       init();
     }
     
-    private Reader(NutchFileSystem nfs, String file, int bufferSize, long start, long length)
+    private Reader(FileSystem fs, String file, int bufferSize, long start, long length)
       throws IOException {
-      this.nfs = nfs;
+      this.fs = fs;
       this.file = file;
-      this.in = nfs.open(new File(file), bufferSize);
+      this.in = fs.open(new File(file), bufferSize);
       seek(start);
       init();
 
@@ -465,7 +465,7 @@
     private int memory; // bytes
     private int factor; // merged per pass
 
-    private NutchFileSystem nfs = null;
+    private FileSystem fs = null;
 
     private Class keyClass;
     private Class valClass;
@@ -473,13 +473,13 @@
     private Configuration conf;
 
     /** Sort and merge files containing the named classes. */
-    public Sorter(NutchFileSystem nfs, Class keyClass, Class valClass, Configuration conf)  {
-      this(nfs, new WritableComparator(keyClass), valClass, conf);
+    public Sorter(FileSystem fs, Class keyClass, Class valClass, Configuration conf)  {
+      this(fs, new WritableComparator(keyClass), valClass, conf);
     }
 
     /** Sort and merge using an arbitrary {@link WritableComparator}. */
-    public Sorter(NutchFileSystem nfs, WritableComparator comparator, Class valClass, Configuration conf) {
-      this.nfs = nfs;
+    public Sorter(FileSystem fs, WritableComparator comparator, Class valClass, Configuration conf) {
+      this.fs = fs;
       this.comparator = comparator;
       this.keyClass = comparator.getKeyClass();
       this.valClass = valClass;
@@ -502,7 +502,7 @@
 
     /** Perform a file sort.*/
     public void sort(String inFile, String outFile) throws IOException {
-      if (nfs.exists(new File(outFile))) {
+      if (fs.exists(new File(outFile))) {
         throw new IOException("already exists: " + outFile);
       }
 
@@ -539,11 +539,11 @@
       private int[] lengths = new int[starts.length];
       
       private Reader in;
-      private NFSDataOutputStream out;
+      private FSDataOutputStream out;
         private String outName;
 
       public SortPass(Configuration conf) throws IOException {
-        in = new Reader(nfs, inFile, conf);
+        in = new Reader(fs, inFile, conf);
       }
       
       public int run() throws IOException {
@@ -610,7 +610,7 @@
       private void flush(int count, boolean done) throws IOException {
         if (out == null) {
           outName = done ? outFile : outFile+".0";
-          out = nfs.create(new File(outName));
+          out = fs.create(new File(outName));
         }
 
         if (!done) {                              // an intermediate file
@@ -697,7 +697,7 @@
       private boolean last;
 
       private MergeQueue queue;
-      private NFSDataInputStream in;
+      private FSDataInputStream in;
       private String inName;
 
       public MergePass(int pass, boolean last) throws IOException {
@@ -708,19 +708,19 @@
           new MergeQueue(factor, last ? outFile : outFile+"."+pass, last);
 
         this.inName = outFile+"."+(pass-1);
-        this.in = nfs.open(new File(inName));
+        this.in = fs.open(new File(inName));
       }
 
       public void close() throws IOException {
         in.close();                               // close and delete input
-        nfs.delete(new File(inName));
+        fs.delete(new File(inName));
 
         queue.close();                            // close queue
       }
 
       public int run() throws IOException {
         int segments = 0;
-        long end = nfs.getLength(new File(inName));
+        long end = fs.getLength(new File(inName));
 
         while (in.getPos() < end) {
           LOG.finer("merging segment " + segments);
@@ -734,7 +734,7 @@
 
             totalCount+= count;
 
-            Reader reader = new Reader(nfs, inName, memory/(factor+1),
+            Reader reader = new Reader(fs, inName, memory/(factor+1),
                                        in.getPos(), length);
             reader.sync = null;                   // disable sync on temp files
 
@@ -794,7 +794,7 @@
         for (int i = 0; i < inFiles.length; i++) {
           String inFile = inFiles[i];
           MergeStream ms =
-            new MergeStream(new Reader(nfs, inFile, memory/(factor+1)));
+            new MergeStream(new Reader(fs, inFile, memory/(factor+1)));
           if (ms.next())
             queue.put(ms);
         }
@@ -827,7 +827,7 @@
     }
 
     private class MergeQueue extends PriorityQueue {
-      private NFSDataOutputStream out;
+      private FSDataOutputStream out;
       private boolean done;
       private boolean compress;
 
@@ -843,7 +843,7 @@
       public MergeQueue(int size, String outName, boolean done)
         throws IOException {
         initialize(size);
-        this.out = nfs.create(new File(outName), true, memory/(factor+1));
+        this.out = fs.create(new File(outName), true, memory/(factor+1));
         this.done = done;
       }
 

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/io/SetFile.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/io/SetFile.java?rev=374738&r1=374737&r2=374738&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/io/SetFile.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/io/SetFile.java Fri Feb  3 12:34:32 2006
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.io.
+package org.apache.hadoop.io;
 
 import java.io.*;
 
@@ -30,14 +30,14 @@
   public static class Writer extends MapFile.Writer {
 
     /** Create the named set for keys of the named class. */
-    public Writer(NutchFileSystem nfs, String dirName, Class keyClass) throws IOException {
-      super(nfs, dirName, keyClass, NullWritable.class);
+    public Writer(FileSystem fs, String dirName, Class keyClass) throws IOException {
+      super(fs, dirName, keyClass, NullWritable.class);
     }
 
     /** Create the named set using the named key comparator. */
-    public Writer(NutchFileSystem nfs, String dirName, WritableComparator comparator)
+    public Writer(FileSystem fs, String dirName, WritableComparator comparator)
       throws IOException {
-      super(nfs, dirName, comparator, NullWritable.class);
+      super(fs, dirName, comparator, NullWritable.class);
     }
 
     /** Append a key to a set.  The key must be strictly greater than the
@@ -51,14 +51,14 @@
   public static class Reader extends MapFile.Reader {
 
     /** Construct a set reader for the named set.*/
-    public Reader(NutchFileSystem nfs, String dirName, Configuration conf) throws IOException {
-      super(nfs, dirName, conf);
+    public Reader(FileSystem fs, String dirName, Configuration conf) throws IOException {
+      super(fs, dirName, conf);
     }
 
     /** Construct a set reader for the named set using the named comparator.*/
-    public Reader(NutchFileSystem nfs, String dirName, WritableComparator comparator, Configuration conf)
+    public Reader(FileSystem fs, String dirName, WritableComparator comparator, Configuration conf)
       throws IOException {
-      super(nfs, dirName, comparator, conf);
+      super(fs, dirName, comparator, conf);
     }
 
     // javadoc inherited

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/io/TwoDArrayWritable.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/io/TwoDArrayWritable.java?rev=374738&r1=374737&r2=374738&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/io/TwoDArrayWritable.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/io/TwoDArrayWritable.java Fri Feb  3 12:34:32 2006
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.io.
+package org.apache.hadoop.io;
 
 import java.io.*;
 import java.lang.reflect.Array;

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/io/UTF8.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/io/UTF8.java?rev=374738&r1=374737&r2=374738&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/io/UTF8.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/io/UTF8.java Fri Feb  3 12:34:32 2006
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.io.
+package org.apache.hadoop.io;
 
 import java.io.IOException;
 import java.io.DataInput;

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/io/VersionMismatchException.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/io/VersionMismatchException.java?rev=374738&r1=374737&r2=374738&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/io/VersionMismatchException.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/io/VersionMismatchException.java Fri Feb  3 12:34:32 2006
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.io.
+package org.apache.hadoop.io;
 
 import java.io.IOException;
 

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/io/VersionedWritable.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/io/VersionedWritable.java?rev=374738&r1=374737&r2=374738&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/io/VersionedWritable.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/io/VersionedWritable.java Fri Feb  3 12:34:32 2006
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.io.
+package org.apache.hadoop.io;
 
 import java.io.DataOutput;
 import java.io.DataInput;

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/io/Writable.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/io/Writable.java?rev=374738&r1=374737&r2=374738&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/io/Writable.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/io/Writable.java Fri Feb  3 12:34:32 2006
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.io.
+package org.apache.hadoop.io;
 
 import java.io.DataOutput;
 import java.io.DataInput;

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/io/WritableComparable.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/io/WritableComparable.java?rev=374738&r1=374737&r2=374738&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/io/WritableComparable.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/io/WritableComparable.java Fri Feb  3 12:34:32 2006
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.io.
+package org.apache.hadoop.io;
 
 /** An interface which extends both {@link Writable} and {@link Comparable}.
  *

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/io/WritableComparator.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/io/WritableComparator.java?rev=374738&r1=374737&r2=374738&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/io/WritableComparator.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/io/WritableComparator.java Fri Feb  3 12:34:32 2006
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.io.
+package org.apache.hadoop.io;
 
 import java.io.*;
 import java.util.*;

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/io/WritableName.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/io/WritableName.java?rev=374738&r1=374737&r2=374738&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/io/WritableName.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/io/WritableName.java Fri Feb  3 12:34:32 2006
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.io.
+package org.apache.hadoop.io;
 
 import java.util.HashMap;
 import java.io.IOException;

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/io/WritableUtils.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/io/WritableUtils.java?rev=374738&r1=374737&r2=374738&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/io/WritableUtils.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/io/WritableUtils.java Fri Feb  3 12:34:32 2006
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.io.
+package org.apache.hadoop.io;
 
 import java.io.*;
 

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/ipc/Client.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/ipc/Client.java?rev=374738&r1=374737&r2=374738&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/ipc/Client.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/ipc/Client.java Fri Feb  3 12:34:32 2006
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.ipc.
+package org.apache.hadoop.ipc;
 
 import java.net.Socket;
 import java.net.InetSocketAddress;

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/ipc/RPC.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/ipc/RPC.java?rev=374738&r1=374737&r2=374738&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/ipc/RPC.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/ipc/RPC.java Fri Feb  3 12:34:32 2006
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.ipc.
+package org.apache.hadoop.ipc;
 
 import java.lang.reflect.Proxy;
 import java.lang.reflect.Method;

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/ipc/Server.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/ipc/Server.java?rev=374738&r1=374737&r2=374738&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/ipc/Server.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/ipc/Server.java Fri Feb  3 12:34:32 2006
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.ipc.
+package org.apache.hadoop.ipc;
 
 import java.io.IOException;
 import java.io.EOFException;

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/CombiningCollector.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/CombiningCollector.java?rev=374738&r1=374737&r2=374738&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/CombiningCollector.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/CombiningCollector.java Fri Feb  3 12:34:32 2006
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.mapred.
+package org.apache.hadoop.mapred;
 
 import java.io.*;
 import java.net.*;

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/FileSplit.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/FileSplit.java?rev=374738&r1=374737&r2=374738&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/FileSplit.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/FileSplit.java Fri Feb  3 12:34:32 2006
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.mapred.
+package org.apache.hadoop.mapred;
 
 import java.io.IOException;
 import java.io.File;
@@ -23,11 +23,11 @@
 
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.UTF8;
-import org.apache.hadoop.fs.NutchFileSystem;
+import org.apache.hadoop.fs.FileSystem;
 
 /** A section of an input file.  Returned by {@link
- * InputFormat#getSplits(NutchFileSystem, JobConf, int)} and passed to
- * InputFormat#getRecordReader(NutchFileSystem,FileSplit,JobConf,Reporter). */
+ * InputFormat#getSplits(FileSystem, JobConf, int)} and passed to
+ * InputFormat#getRecordReader(FileSystem,FileSplit,JobConf,Reporter). */
 public class FileSplit implements Writable {
   private File file;
   private long start;

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/InputFormat.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/InputFormat.java?rev=374738&r1=374737&r2=374738&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/InputFormat.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/InputFormat.java Fri Feb  3 12:34:32 2006
@@ -14,14 +14,14 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.mapred.
+package org.apache.hadoop.mapred;
 
 import java.io.IOException;
 import java.io.File;
 
-import org.apache.hadoop.fs.NutchFileSystem;
+import org.apache.hadoop.fs.FileSystem;
 
-/** An input data format.  Input files are stored in a {@link NutchFileSystem}.
+/** An input data format.  Input files are stored in a {@link FileSystem}.
  * The processing of an input file may be split across multiple machines.
  * Files are processed as sequences of records, implementing {@link
  * RecordReader}.  Files must thus be split on record boundaries. */
@@ -34,17 +34,17 @@
    * @param numSplits the desired number of splits
    * @return the splits
    */
-  FileSplit[] getSplits(NutchFileSystem fs, JobConf job, int numSplits)
+  FileSplit[] getSplits(FileSystem fs, JobConf job, int numSplits)
     throws IOException;
 
   /** Construct a {@link RecordReader} for a {@link FileSplit}.
    *
-   * @param fs the {@link NutchFileSystem}
+   * @param fs the {@link FileSystem}
    * @param split the {@link FileSplit}
    * @param job the job that this split belongs to
    * @return a {@link RecordReader}
    */
-  RecordReader getRecordReader(NutchFileSystem fs, FileSplit split,
+  RecordReader getRecordReader(FileSystem fs, FileSplit split,
                                JobConf job, Reporter reporter)
     throws IOException;
 }

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/InputFormatBase.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/InputFormatBase.java?rev=374738&r1=374737&r2=374738&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/InputFormatBase.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/InputFormatBase.java Fri Feb  3 12:34:32 2006
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.mapred.
+package org.apache.hadoop.mapred;
 
 import java.io.IOException;
 import java.io.File;
@@ -23,7 +23,7 @@
 import java.util.ArrayList;
 import java.util.logging.Logger;
 
-import org.apache.hadoop.fs.NutchFileSystem;
+import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.util.LogFormatter;
 
 /** A base class for {@link InputFormat}. */
@@ -40,7 +40,7 @@
     this.minSplitSize = minSplitSize;
   }
 
-  public abstract RecordReader getRecordReader(NutchFileSystem fs,
+  public abstract RecordReader getRecordReader(FileSystem fs,
                                                FileSplit split,
                                                JobConf job,
                                                Reporter reporter)
@@ -57,7 +57,7 @@
    * @return array of File objects, never zero length.
    * @throws IOException if zero items.
    */
-  protected File[] listFiles(NutchFileSystem fs, JobConf job)
+  protected File[] listFiles(FileSystem fs, JobConf job)
     throws IOException {
     File[] dirs = job.getInputDirs();
     String subdir = job.get("mapred.input.subdir");
@@ -87,9 +87,9 @@
     return (File[])result.toArray(new File[result.size()]);
   }
 
-  /** Splits files returned by {#listFiles(NutchFileSystem,JobConf) when
+  /** Splits files returned by {#listFiles(FileSystem,JobConf) when
    * they're too big.*/ 
-  public FileSplit[] getSplits(NutchFileSystem fs, JobConf job, int numSplits)
+  public FileSplit[] getSplits(FileSystem fs, JobConf job, int numSplits)
     throws IOException {
 
     File[] files = listFiles(fs, job);

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/InterTrackerProtocol.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/InterTrackerProtocol.java?rev=374738&r1=374737&r2=374738&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/InterTrackerProtocol.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/InterTrackerProtocol.java Fri Feb  3 12:34:32 2006
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.mapred.
+package org.apache.hadoop.mapred;
 
 import java.io.*;
 import org.apache.hadoop.io.*;

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobClient.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobClient.java?rev=374738&r1=374737&r2=374738&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobClient.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobClient.java Fri Feb  3 12:34:32 2006
@@ -13,7 +13,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.mapred.
+package org.apache.hadoop.mapred;
 
 import org.apache.hadoop.io.*;
 import org.apache.hadoop.fs.*;
@@ -165,7 +165,7 @@
     }
 
     JobSubmissionProtocol jobSubmitClient;
-    NutchFileSystem fs = null;
+    FileSystem fs = null;
 
     private Configuration conf;
     static Random r = new Random();
@@ -207,10 +207,10 @@
      * Get a filesystem handle.  We need this to prepare jobs
      * for submission to the MapReduce system.
      */
-    public synchronized NutchFileSystem getFs() throws IOException {
+    public synchronized FileSystem getFs() throws IOException {
       if (this.fs == null) {
         String fsName = jobSubmitClient.getFilesystemName();
-        this.fs = NutchFileSystem.getNamed(fsName, this.conf);
+        this.fs = FileSystem.getNamed(fsName, this.conf);
       }
       return fs;
     }
@@ -249,7 +249,7 @@
         }
 
         // Write job file to JobTracker's fs
-        NFSDataOutputStream out = getFs().create(submitJobFile);
+        FSDataOutputStream out = getFs().create(submitJobFile);
         try {
           job.write(out);
         } finally {

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobConf.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobConf.java?rev=374738&r1=374737&r2=374738&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobConf.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobConf.java Fri Feb  3 12:34:32 2006
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.mapred.
+package org.apache.hadoop.mapred;
 
 
 import java.io.IOException;
@@ -31,7 +31,7 @@
 import java.util.List;
 import java.util.Collections;
 
-import org.apache.hadoop.fs.NutchFileSystem;
+import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.conf.Configuration;
 

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobConfigurable.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobConfigurable.java?rev=374738&r1=374737&r2=374738&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobConfigurable.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobConfigurable.java Fri Feb  3 12:34:32 2006
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.mapred.
+package org.apache.hadoop.mapred;
 
 /** That what may be configured. */
 public interface JobConfigurable {

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobInProgress.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobInProgress.java?rev=374738&r1=374737&r2=374738&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobInProgress.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobInProgress.java Fri Feb  3 12:34:32 2006
@@ -13,7 +13,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.mapred.
+package org.apache.hadoop.mapred;
 
 import org.apache.hadoop.io.*;
 import org.apache.hadoop.fs.*;
@@ -67,7 +67,7 @@
         this.startTime = System.currentTimeMillis();
 
         this.localJobFile = new JobConf(conf).getLocalFile(JobTracker.SUBDIR, jobid + ".xml");
-        NutchFileSystem fs = NutchFileSystem.get(conf);
+        FileSystem fs = FileSystem.get(conf);
         fs.copyToLocalFile(new File(jobFile), localJobFile);
 
         JobConf jd = new JobConf(localJobFile);
@@ -99,7 +99,7 @@
         String jobFile = profile.getJobFile();
 
         JobConf jd = new JobConf(localJobFile);
-        NutchFileSystem fs = NutchFileSystem.get(conf);
+        FileSystem fs = FileSystem.get(conf);
         FileSplit[] splits =
             jd.getInputFormat().getSplits(fs, jd, numMapTasks);
 
@@ -427,7 +427,7 @@
         //
         if (deleteUponCompletion != null) {
             JobConf jd = new JobConf(deleteUponCompletion);
-            NutchFileSystem fs = NutchFileSystem.get(conf);
+            FileSystem fs = FileSystem.get(conf);
             fs.delete(new File(jd.getJar()));
             fs.delete(new File(deleteUponCompletion));
             deleteUponCompletion = null;

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobProfile.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobProfile.java?rev=374738&r1=374737&r2=374738&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobProfile.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobProfile.java Fri Feb  3 12:34:32 2006
@@ -13,7 +13,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.mapred.
+package org.apache.hadoop.mapred;
 
 import org.apache.hadoop.io.*;
 

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobStatus.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobStatus.java?rev=374738&r1=374737&r2=374738&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobStatus.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobStatus.java Fri Feb  3 12:34:32 2006
@@ -13,7 +13,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.mapred.
+package org.apache.hadoop.mapred;
 
 import org.apache.hadoop.io.*;
 

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobSubmissionProtocol.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobSubmissionProtocol.java?rev=374738&r1=374737&r2=374738&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobSubmissionProtocol.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobSubmissionProtocol.java Fri Feb  3 12:34:32 2006
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.mapred.
+package org.apache.hadoop.mapred;
 
 import java.io.*;
 import java.util.*;

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobTracker.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobTracker.java?rev=374738&r1=374737&r2=374738&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobTracker.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobTracker.java Fri Feb  3 12:34:32 2006
@@ -13,7 +13,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.mapred.
+package org.apache.hadoop.mapred;
 
 
 import org.apache.hadoop.io.*;
@@ -257,7 +257,7 @@
     // Some jobs are stored in a local system directory.  We can delete
     // the files when we're done with the job.
     static final String SUBDIR = "jobTracker";
-    NutchFileSystem fs;
+    FileSystem fs;
     File systemDir;
     private Configuration conf;
 
@@ -280,7 +280,7 @@
         this.conf = conf;
         JobConf jobConf = new JobConf(conf);
         this.systemDir = jobConf.getSystemDir();
-        this.fs = NutchFileSystem.get(conf);
+        this.fs = FileSystem.get(conf);
         FileUtil.fullyDelete(fs, systemDir);
         fs.mkdirs(systemDir);
 

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobTrackerInfoServer.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobTrackerInfoServer.java?rev=374738&r1=374737&r2=374738&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobTrackerInfoServer.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobTrackerInfoServer.java Fri Feb  3 12:34:32 2006
@@ -13,7 +13,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.mapred.
+package org.apache.hadoop.mapred;
 
 import org.apache.hadoop.io.*;
 import org.apache.hadoop.ipc.*;

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/LocalJobRunner.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/LocalJobRunner.java?rev=374738&r1=374737&r2=374738&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/LocalJobRunner.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/LocalJobRunner.java Fri Feb  3 12:34:32 2006
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.mapred.
+package org.apache.hadoop.mapred;
 
 import java.io.*;
 import java.util.*;
@@ -30,7 +30,7 @@
   public static final Logger LOG =
     LogFormatter.getLogger("org.apache.hadoop.mapred.LocalJobRunner");
 
-  private NutchFileSystem fs;
+  private FileSystem fs;
   private HashMap jobs = new HashMap();
   private Configuration conf;
 
@@ -84,7 +84,7 @@
           File mapOut = this.mapoutputFile.getOutputFile(mapId, 0);
           File reduceIn = this.mapoutputFile.getInputFile(mapId, reduceId);
           reduceIn.getParentFile().mkdirs();
-          if (!NutchFileSystem.getNamed("local", this.job).rename(mapOut, reduceIn))
+          if (!FileSystem.getNamed("local", this.job).rename(mapOut, reduceIn))
             throw new IOException("Couldn't rename " + mapOut);
           this.mapoutputFile.removeAll(mapId);
         }
@@ -151,7 +151,7 @@
   }
 
   public LocalJobRunner(Configuration conf) throws IOException {
-    this.fs = NutchFileSystem.get(conf);
+    this.fs = FileSystem.get(conf);
     this.conf = conf;
   }
 

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MRConstants.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MRConstants.java?rev=374738&r1=374737&r2=374738&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MRConstants.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MRConstants.java Fri Feb  3 12:34:32 2006
@@ -13,7 +13,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.mapred.
+package org.apache.hadoop.mapred;
 
 /*******************************
  * Some handy constants

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MapFileOutputFormat.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MapFileOutputFormat.java?rev=374738&r1=374737&r2=374738&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MapFileOutputFormat.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MapFileOutputFormat.java Fri Feb  3 12:34:32 2006
@@ -14,13 +14,13 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.mapred.
+package org.apache.hadoop.mapred;
 
 import java.io.IOException;
 import java.io.File;
 import java.util.Arrays;
 
-import org.apache.hadoop.fs.NutchFileSystem;
+import org.apache.hadoop.fs.FileSystem;
 
 import org.apache.hadoop.io.MapFile;
 import org.apache.hadoop.io.WritableComparable;
@@ -29,7 +29,7 @@
 
 public class MapFileOutputFormat implements OutputFormat {
 
-  public RecordWriter getRecordWriter(NutchFileSystem fs, JobConf job,
+  public RecordWriter getRecordWriter(FileSystem fs, JobConf job,
                                       String name) throws IOException {
 
     File file = new File(job.getOutputDir(), name);
@@ -53,7 +53,7 @@
   }
 
   /** Open the output generated by this format. */
-  public static MapFile.Reader[] getReaders(NutchFileSystem fs, File dir, Configuration conf)
+  public static MapFile.Reader[] getReaders(FileSystem fs, File dir, Configuration conf)
     throws IOException {
     File[] names = fs.listFiles(dir);
     

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MapOutputFile.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MapOutputFile.java?rev=374738&r1=374737&r2=374738&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MapOutputFile.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MapOutputFile.java Fri Feb  3 12:34:32 2006
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.mapred.
+package org.apache.hadoop.mapred;
 
 import java.io.IOException;
 
@@ -94,7 +94,7 @@
     // write the length-prefixed file content to the wire
     File file = getOutputFile(mapTaskId, partition);
     out.writeLong(file.length());
-    NFSDataInputStream in = NutchFileSystem.getNamed("local", this.jobConf).open(file);
+    FSDataInputStream in = FileSystem.getNamed("local", this.jobConf).open(file);
     try {
       byte[] buffer = new byte[8192];
       int l;
@@ -118,7 +118,7 @@
     long length = in.readLong();
     float progPerByte = 1.0f / length;
     long unread = length;
-    NFSDataOutputStream out = NutchFileSystem.getNamed("local", this.jobConf).create(file);
+    FSDataOutputStream out = FileSystem.getNamed("local", this.jobConf).create(file);
     try {
       byte[] buffer = new byte[8192];
       while (unread > 0) {

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MapOutputLocation.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MapOutputLocation.java?rev=374738&r1=374737&r2=374738&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MapOutputLocation.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MapOutputLocation.java Fri Feb  3 12:34:32 2006
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.mapred.
+package org.apache.hadoop.mapred;
 
 import java.io.IOException;
 

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MapOutputProtocol.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MapOutputProtocol.java?rev=374738&r1=374737&r2=374738&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MapOutputProtocol.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MapOutputProtocol.java Fri Feb  3 12:34:32 2006
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.mapred.
+package org.apache.hadoop.mapred;
 
 import java.io.IOException;
 

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MapRunnable.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MapRunnable.java?rev=374738&r1=374737&r2=374738&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MapRunnable.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MapRunnable.java Fri Feb  3 12:34:32 2006
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.mapred.
+package org.apache.hadoop.mapred;
 
 import java.io.IOException;
 

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MapRunner.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MapRunner.java?rev=374738&r1=374737&r2=374738&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MapRunner.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MapRunner.java Fri Feb  3 12:34:32 2006
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.mapred.
+package org.apache.hadoop.mapred;
 
 import java.io.IOException;
 

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MapTask.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MapTask.java?rev=374738&r1=374737&r2=374738&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MapTask.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MapTask.java Fri Feb  3 12:34:32 2006
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.mapred.
+package org.apache.hadoop.mapred;
 
 import java.io.*;
 import java.net.*;
@@ -22,7 +22,7 @@
 
 import org.apache.hadoop.io.*;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.NutchFileSystem;
+import org.apache.hadoop.fs.FileSystem;
 
 /** A Map task. */
 public class MapTask extends Task {
@@ -68,7 +68,7 @@
     try {
       for (int i = 0; i < partitions; i++) {
         outs[i] =
-          new SequenceFile.Writer(NutchFileSystem.getNamed("local", job),
+          new SequenceFile.Writer(FileSystem.getNamed("local", job),
                                   this.mapOutputFile.getOutputFile(getTaskId(), i).toString(),
                                   job.getOutputKeyClass(),
                                   job.getOutputValueClass());
@@ -97,7 +97,7 @@
 
       final RecordReader rawIn =                  // open input
         job.getInputFormat().getRecordReader
-        (NutchFileSystem.get(job), split, job, reporter);
+        (FileSystem.get(job), split, job, reporter);
 
       RecordReader in = new RecordReader() {      // wrap in progress reporter
           private float perByte = 1.0f /(float)split.getLength();

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MapTaskRunner.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MapTaskRunner.java?rev=374738&r1=374737&r2=374738&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MapTaskRunner.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MapTaskRunner.java Fri Feb  3 12:34:32 2006
@@ -13,7 +13,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.mapred.
+package org.apache.hadoop.mapred;
 
 import org.apache.hadoop.io.*;
 import org.apache.hadoop.ipc.*;

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/Mapper.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/Mapper.java?rev=374738&r1=374737&r2=374738&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/Mapper.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/Mapper.java Fri Feb  3 12:34:32 2006
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.mapred.
+package org.apache.hadoop.mapred;
 
 import java.io.IOException;
 

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/OutputCollector.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/OutputCollector.java?rev=374738&r1=374737&r2=374738&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/OutputCollector.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/OutputCollector.java Fri Feb  3 12:34:32 2006
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.mapred.
+package org.apache.hadoop.mapred;
 
 import java.io.IOException;
 import java.io.DataInput;

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/OutputFormat.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/OutputFormat.java?rev=374738&r1=374737&r2=374738&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/OutputFormat.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/OutputFormat.java Fri Feb  3 12:34:32 2006
@@ -14,15 +14,15 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.mapred.
+package org.apache.hadoop.mapred;
 
 import java.io.IOException;
 import java.io.File;
 
-import org.apache.hadoop.fs.NutchFileSystem;
+import org.apache.hadoop.fs.FileSystem;
 
 /** An output data format.  Output files are stored in a {@link
- * NutchFileSystem}. */
+ * FileSystem}. */
 public interface OutputFormat {
 
   /** Construct a {@link RecordWriter}.
@@ -32,7 +32,7 @@
    * @param name the unique name for this part of the output
    * @return a {@link RecordWriter}
    */
-  RecordWriter getRecordWriter(NutchFileSystem fs, JobConf job, String name)
+  RecordWriter getRecordWriter(FileSystem fs, JobConf job, String name)
     throws IOException;
 }
 

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/Partitioner.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/Partitioner.java?rev=374738&r1=374737&r2=374738&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/Partitioner.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/Partitioner.java Fri Feb  3 12:34:32 2006
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.mapred.
+package org.apache.hadoop.mapred;
 
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableComparable;

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/RecordReader.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/RecordReader.java?rev=374738&r1=374737&r2=374738&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/RecordReader.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/RecordReader.java Fri Feb  3 12:34:32 2006
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.mapred.
+package org.apache.hadoop.mapred;
 
 import java.io.IOException;
 import java.io.DataInput;

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/RecordWriter.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/RecordWriter.java?rev=374738&r1=374737&r2=374738&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/RecordWriter.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/RecordWriter.java Fri Feb  3 12:34:32 2006
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.mapred.
+package org.apache.hadoop.mapred;
 
 import java.io.IOException;
 import java.io.DataOutput;

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/ReduceTask.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/ReduceTask.java?rev=374738&r1=374737&r2=374738&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/ReduceTask.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/ReduceTask.java Fri Feb  3 12:34:32 2006
@@ -14,11 +14,12 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.mapred.
+package org.apache.hadoop.mapred;
 
 import org.apache.hadoop.io.*;
 import org.apache.hadoop.conf.*;
 import org.apache.hadoop.fs.*;
+import org.apache.hadoop.util.*;
 
 import java.io.*;
 import java.net.*;
@@ -170,7 +171,7 @@
     Class valueClass = job.getOutputValueClass();
     Reducer reducer = (Reducer)job.newInstance(job.getReducerClass());
     reducer.configure(job);
-    NutchFileSystem lfs = NutchFileSystem.getNamed("local", job);
+    FileSystem lfs = FileSystem.getNamed("local", job);
 
     copyPhase.complete();                         // copy is already complete
 
@@ -254,7 +255,7 @@
     // make output collector
     String name = getOutputName(getPartition());
     final RecordWriter out =
-      job.getOutputFormat().getRecordWriter(NutchFileSystem.get(job), job, name);
+      job.getOutputFormat().getRecordWriter(FileSystem.get(job), job, name);
     OutputCollector collector = new OutputCollector() {
         public void collect(WritableComparable key, Writable value)
           throws IOException {

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/ReduceTaskRunner.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/ReduceTaskRunner.java?rev=374738&r1=374737&r2=374738&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/ReduceTaskRunner.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/ReduceTaskRunner.java Fri Feb  3 12:34:32 2006
@@ -13,12 +13,12 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.mapred.
+package org.apache.hadoop.mapred;
 
 import org.apache.hadoop.io.*;
 import org.apache.hadoop.ipc.*;
 import org.apache.hadoop.conf.*;
-import org.apache.hadoop.util.LogFormatter;
+import org.apache.hadoop.util.*;
 
 import java.io.*;
 import java.net.*;

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/Reducer.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/Reducer.java?rev=374738&r1=374737&r2=374738&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/Reducer.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/Reducer.java Fri Feb  3 12:34:32 2006
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.mapred.
+package org.apache.hadoop.mapred;
 
 import java.io.IOException;
 

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/Reporter.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/Reporter.java?rev=374738&r1=374737&r2=374738&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/Reporter.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/Reporter.java Fri Feb  3 12:34:32 2006
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.mapred.
+package org.apache.hadoop.mapred;
 
 import java.io.IOException;
 

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/RunningJob.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/RunningJob.java?rev=374738&r1=374737&r2=374738&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/RunningJob.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/RunningJob.java Fri Feb  3 12:34:32 2006
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.mapred.
+package org.apache.hadoop.mapred;
 
 import org.apache.hadoop.io.*;
 

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/SequenceFileInputFormat.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/SequenceFileInputFormat.java?rev=374738&r1=374737&r2=374738&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/SequenceFileInputFormat.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/SequenceFileInputFormat.java Fri Feb  3 12:34:32 2006
@@ -14,12 +14,12 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.mapred.
+package org.apache.hadoop.mapred;
 
 import java.io.IOException;
 import java.io.File;
 
-import org.apache.hadoop.fs.NutchFileSystem;
+import org.apache.hadoop.fs.FileSystem;
 
 import org.apache.hadoop.io.SequenceFile;
 import org.apache.hadoop.io.MapFile;
@@ -35,7 +35,7 @@
     setMinSplitSize(SequenceFile.SYNC_INTERVAL);
   }
 
-  protected File[] listFiles(NutchFileSystem fs, JobConf job)
+  protected File[] listFiles(FileSystem fs, JobConf job)
     throws IOException {
 
     File[] files = super.listFiles(fs, job);
@@ -48,7 +48,7 @@
     return files;
   }
 
-  public RecordReader getRecordReader(NutchFileSystem fs, FileSplit split,
+  public RecordReader getRecordReader(FileSystem fs, FileSplit split,
                                       JobConf job, Reporter reporter)
     throws IOException {
 

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/SequenceFileOutputFormat.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/SequenceFileOutputFormat.java?rev=374738&r1=374737&r2=374738&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/SequenceFileOutputFormat.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/SequenceFileOutputFormat.java Fri Feb  3 12:34:32 2006
@@ -14,13 +14,13 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.mapred.
+package org.apache.hadoop.mapred;
 
 import java.io.IOException;
 import java.io.File;
 import java.util.Arrays;
 
-import org.apache.hadoop.fs.NutchFileSystem;
+import org.apache.hadoop.fs.FileSystem;
 
 import org.apache.hadoop.io.MapFile;
 import org.apache.hadoop.io.SequenceFile;
@@ -30,7 +30,7 @@
 
 public class SequenceFileOutputFormat implements OutputFormat {
 
-  public RecordWriter getRecordWriter(NutchFileSystem fs, JobConf job,
+  public RecordWriter getRecordWriter(FileSystem fs, JobConf job,
                                       String name) throws IOException {
 
     File file = new File(job.getOutputDir(), name);
@@ -56,7 +56,7 @@
   /** Open the output generated by this format. */
   public static SequenceFile.Reader[] getReaders(Configuration conf, File dir)
     throws IOException {
-    NutchFileSystem fs = NutchFileSystem.get(conf);
+    FileSystem fs = FileSystem.get(conf);
     File[] names = fs.listFiles(dir);
     
     // sort names, so that hash partitioning works

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/SequenceFileRecordReader.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/SequenceFileRecordReader.java?rev=374738&r1=374737&r2=374738&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/SequenceFileRecordReader.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/SequenceFileRecordReader.java Fri Feb  3 12:34:32 2006
@@ -14,12 +14,12 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.mapred.
+package org.apache.hadoop.mapred;
 
 import java.io.IOException;
 import java.io.File;
 
-import org.apache.hadoop.fs.NutchFileSystem;
+import org.apache.hadoop.fs.FileSystem;
 
 import org.apache.hadoop.io.SequenceFile;
 import org.apache.hadoop.io.Writable;
@@ -36,7 +36,7 @@
 
   public SequenceFileRecordReader(Configuration conf, FileSplit split)
     throws IOException {
-    NutchFileSystem fs = NutchFileSystem.get(conf);
+    FileSystem fs = FileSystem.get(conf);
     this.in = new SequenceFile.Reader(fs, split.getFile().toString(), conf);
     this.end = split.getStart() + split.getLength();
 

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/Task.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/Task.java?rev=374738&r1=374737&r2=374738&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/Task.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/Task.java Fri Feb  3 12:34:32 2006
@@ -14,10 +14,11 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.mapred.
+package org.apache.hadoop.mapred;
 
 import org.apache.hadoop.io.*;
 import org.apache.hadoop.conf.*;
+import org.apache.hadoop.util.*;
 
 import java.io.*;
 import java.net.*;

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskInProgress.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskInProgress.java?rev=374738&r1=374737&r2=374738&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskInProgress.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskInProgress.java Fri Feb  3 12:34:32 2006
@@ -13,7 +13,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.mapred.
+package org.apache.hadoop.mapred;
 
 import org.apache.hadoop.io.*;
 import org.apache.hadoop.fs.*;
@@ -363,7 +363,7 @@
         } else {
             try {
                 if (isMapTask()) {
-                    NutchFileSystem fs = NutchFileSystem.get(conf);
+                    FileSystem fs = FileSystem.get(conf);
                     String hints[][] = fs.getFileCacheHints(split.getFile(), split.getStart(), split.getLength());
                     for (int i = 0; i < hints.length; i++) {
                         for (int j = 0; j < hints[i].length; j++) {

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskRunner.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskRunner.java?rev=374738&r1=374737&r2=374738&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskRunner.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskRunner.java Fri Feb  3 12:34:32 2006
@@ -13,7 +13,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.mapred.
+package org.apache.hadoop.mapred;
 
 import org.apache.hadoop.io.*;
 import org.apache.hadoop.ipc.*;

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskStatus.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskStatus.java?rev=374738&r1=374737&r2=374738&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskStatus.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskStatus.java Fri Feb  3 12:34:32 2006
@@ -13,7 +13,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.mapred.
+package org.apache.hadoop.mapred;
 
 import org.apache.hadoop.io.*;
 

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskTracker.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskTracker.java?rev=374738&r1=374737&r2=374738&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskTracker.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskTracker.java Fri Feb  3 12:34:32 2006
@@ -13,7 +13,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
- package org.apache.hadoop.mapred.
+ package org.apache.hadoop.mapred;
 
 import org.apache.hadoop.fs.*;
 import org.apache.hadoop.io.*;
@@ -60,7 +60,7 @@
     boolean justStarted = true;
 
     static Random r = new Random();
-    NutchFileSystem fs = null;
+    FileSystem fs = null;
     static final String SUBDIR = "taskTracker";
 
     private Configuration fConf;
@@ -233,7 +233,7 @@
             // Xmit the heartbeat
             //
             if (justStarted) {
-                this.fs = NutchFileSystem.getNamed(jobClient.getFilesystemName(), this.fConf);
+                this.fs = FileSystem.getNamed(jobClient.getFilesystemName(), this.fConf);
             }
             
             int resultCode = jobClient.emitHeartbeat(new TaskTrackerStatus(taskTrackerName, localHostname, mapOutputPort, taskReports), justStarted);

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskTrackerStatus.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskTrackerStatus.java?rev=374738&r1=374737&r2=374738&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskTrackerStatus.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskTrackerStatus.java Fri Feb  3 12:34:32 2006
@@ -13,7 +13,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.mapred.
+package org.apache.hadoop.mapred;
 
 import org.apache.hadoop.io.*;
 

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskUmbilicalProtocol.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskUmbilicalProtocol.java?rev=374738&r1=374737&r2=374738&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskUmbilicalProtocol.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskUmbilicalProtocol.java Fri Feb  3 12:34:32 2006
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.mapred.
+package org.apache.hadoop.mapred;
 
 import java.io.IOException;
 

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TextInputFormat.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TextInputFormat.java?rev=374738&r1=374737&r2=374738&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TextInputFormat.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TextInputFormat.java Fri Feb  3 12:34:32 2006
@@ -14,13 +14,13 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.mapred.
+package org.apache.hadoop.mapred;
 
 import java.io.IOException;
 import java.io.File;
 
-import org.apache.hadoop.fs.NutchFileSystem;
-import org.apache.hadoop.fs.NFSDataInputStream;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.FSDataInputStream;
 
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableComparable;
@@ -32,7 +32,7 @@
  * the position in the file, and values are the line of text. */
 public class TextInputFormat extends InputFormatBase {
 
-  public RecordReader getRecordReader(NutchFileSystem fs, FileSplit split,
+  public RecordReader getRecordReader(FileSystem fs, FileSplit split,
                                       JobConf job, Reporter reporter)
     throws IOException {
 
@@ -42,7 +42,7 @@
     final long end = start + split.getLength();
 
     // open the file and seek to the start of the split
-    final NFSDataInputStream in = fs.open(split.getFile());
+    final FSDataInputStream in = fs.open(split.getFile());
     
     if (start != 0) {
       in.seek(start-1);
@@ -76,7 +76,7 @@
       };
   }
 
-  private static String readLine(NFSDataInputStream in) throws IOException {
+  private static String readLine(FSDataInputStream in) throws IOException {
     StringBuffer buffer = new StringBuffer();
     while (true) {
 

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TextOutputFormat.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TextOutputFormat.java?rev=374738&r1=374737&r2=374738&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TextOutputFormat.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TextOutputFormat.java Fri Feb  3 12:34:32 2006
@@ -14,25 +14,25 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.mapred.
+package org.apache.hadoop.mapred;
 
 import java.io.IOException;
 import java.io.File;
 
-import org.apache.hadoop.fs.NutchFileSystem;
-import org.apache.hadoop.fs.NFSDataOutputStream;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.FSDataOutputStream;
 
 import org.apache.hadoop.io.WritableComparable;
 import org.apache.hadoop.io.Writable;
 
 public class TextOutputFormat implements OutputFormat {
 
-  public RecordWriter getRecordWriter(NutchFileSystem fs, JobConf job,
+  public RecordWriter getRecordWriter(FileSystem fs, JobConf job,
                                       String name) throws IOException {
 
     File file = new File(job.getOutputDir(), name);
 
-    final NFSDataOutputStream out = fs.create(file);
+    final FSDataOutputStream out = fs.create(file);
 
     return new RecordWriter() {
         public synchronized void write(WritableComparable key, Writable value)

Copied: lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/TestDFS.java (from r374735, lucene/hadoop/trunk/src/test/org/apache/hadoop/ndfs/TestNDFS.java)
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/TestDFS.java?p2=lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/TestDFS.java&p1=lucene/hadoop/trunk/src/test/org/apache/hadoop/ndfs/TestNDFS.java&r1=374735&r2=374738&rev=374738&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/test/org/apache/hadoop/ndfs/TestNDFS.java (original)
+++ lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/TestDFS.java Fri Feb  3 12:34:32 2006
@@ -14,12 +14,12 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.dfs.
+package org.apache.hadoop.dfs;
 
 import junit.framework.TestCase;
 import junit.framework.AssertionFailedError;
-import org.apache.hadoop.fs.NFSInputStream;
-import org.apache.hadoop.fs.NFSOutputStream;
+import org.apache.hadoop.fs.FSInputStream;
+import org.apache.hadoop.fs.FSOutputStream;
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.io.UTF8;
 import org.apache.hadoop.util.LogFormatter;
@@ -65,7 +65,7 @@
  * <p>Bring the namenode down and restart it to verify that datanodes reconnect.
  * <p>
  * <p>For another approach to filesystem testing, see the high level
- * (NutchFS level) test {@link org.apache.hadoop.fs.TestNutchFileSystem}.
+ * (NutchFS level) test {@link org.apache.hadoop.fs.TestFileSystem}.
  * @author Paul Baclace
  */
 public class TestDFS extends TestCase implements FSConstants {
@@ -255,7 +255,7 @@
         for (int iFileNumber = 0; iFileNumber < numFiles; iFileNumber++) {
           testFileName = new UTF8("/f" + iFileNumber);
           testfilesList.add(testFileName);
-          NFSOutputStream nos = dfsClient.create(testFileName, false);
+          FSOutputStream nos = dfsClient.create(testFileName, false);
           try {
             for (long nBytesWritten = 0L;
                  nBytesWritten < nBytes;
@@ -304,7 +304,7 @@
         ListIterator li = testfilesList.listIterator();
         while (li.hasNext()) {
           testFileName = (UTF8) li.next();
-          NFSInputStream nis = dfsClient.open(testFileName);
+          FSInputStream nis = dfsClient.open(testFileName);
           byte[] bufferGolden = new byte[bufferSize];
           int m = 42;
           try {

Copied: lucene/hadoop/trunk/src/test/org/apache/hadoop/fs/TestFileSystem.java (from r374735, lucene/hadoop/trunk/src/test/org/apache/hadoop/fs/TestNutchFileSystem.java)
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/test/org/apache/hadoop/fs/TestFileSystem.java?p2=lucene/hadoop/trunk/src/test/org/apache/hadoop/fs/TestFileSystem.java&p1=lucene/hadoop/trunk/src/test/org/apache/hadoop/fs/TestNutchFileSystem.java&r1=374735&r2=374738&rev=374738&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/test/org/apache/hadoop/fs/TestNutchFileSystem.java (original)
+++ lucene/hadoop/trunk/src/test/org/apache/hadoop/fs/TestFileSystem.java Fri Feb  3 12:34:32 2006
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.fs.
+package org.apache.hadoop.fs;
 
 import java.io.*;
 import java.util.*;
@@ -27,7 +27,7 @@
 import org.apache.hadoop.io.*;
 import org.apache.hadoop.conf.*;
 
-public class TestNutchFileSystem extends TestCase {
+public class TestFileSystem extends TestCase {
   private static final Logger LOG = InputFormatBase.LOG;
 
   private static Configuration conf = new Configuration();
@@ -49,7 +49,7 @@
   public static void testFs(long megaBytes, int numFiles, long seed)
     throws Exception {
 
-    NutchFileSystem fs = NutchFileSystem.get(conf);
+    FileSystem fs = FileSystem.get(conf);
 
     if (seed == 0)
       seed = new Random().nextLong();
@@ -62,7 +62,7 @@
     seekTest(fs, false);
   }
 
-  public static void createControlFile(NutchFileSystem fs,
+  public static void createControlFile(FileSystem fs,
                                        long megaBytes, int numFiles,
                                        long seed) throws Exception {
 
@@ -102,12 +102,12 @@
   public static class WriteMapper extends Configured implements Mapper {
     private Random random = new Random();
     private byte[] buffer = new byte[BUFFER_SIZE];
-    private NutchFileSystem fs;
+    private FileSystem fs;
     private boolean fastCheck;
     
     {
       try {
-        fs = NutchFileSystem.get(conf);
+        fs = FileSystem.get(conf);
       } catch (IOException e) {
         throw new RuntimeException(e);
       }
@@ -158,7 +158,7 @@
     }
   }
 
-  public static void writeTest(NutchFileSystem fs, boolean fastCheck)
+  public static void writeTest(FileSystem fs, boolean fastCheck)
     throws Exception {
 
     fs.delete(DATA_DIR);
@@ -186,12 +186,12 @@
     private Random random = new Random();
     private byte[] buffer = new byte[BUFFER_SIZE];
     private byte[] check  = new byte[BUFFER_SIZE];
-    private NutchFileSystem fs;
+    private FileSystem fs;
     private boolean fastCheck;
 
     {
       try {
-        fs = NutchFileSystem.get(conf);
+        fs = FileSystem.get(conf);
       } catch (IOException e) {
         throw new RuntimeException(e);
       }
@@ -250,7 +250,7 @@
     }
   }
 
-  public static void readTest(NutchFileSystem fs, boolean fastCheck)
+  public static void readTest(FileSystem fs, boolean fastCheck)
     throws Exception {
 
     fs.delete(READ_DIR);
@@ -278,12 +278,12 @@
   public static class SeekMapper extends Configured implements Mapper {
     private Random random = new Random();
     private byte[] check  = new byte[BUFFER_SIZE];
-    private NutchFileSystem fs;
+    private FileSystem fs;
     private boolean fastCheck;
 
     {
       try {
-        fs = NutchFileSystem.get(conf);
+        fs = FileSystem.get(conf);
       } catch (IOException e) {
         throw new RuntimeException(e);
       }
@@ -309,7 +309,7 @@
 
       reporter.setStatus("opening " + name);
 
-      NFSDataInputStream in = fs.open(new File(DATA_DIR, name));
+      FSDataInputStream in = fs.open(new File(DATA_DIR, name));
         
       try {
         for (int i = 0; i < SEEKS_PER_FILE; i++) {
@@ -342,7 +342,7 @@
     }
   }
 
-  public static void seekTest(NutchFileSystem fs, boolean fastCheck)
+  public static void seekTest(FileSystem fs, boolean fastCheck)
     throws Exception {
 
     fs.delete(READ_DIR);
@@ -375,7 +375,7 @@
     boolean fastCheck = false;
     long seed = new Random().nextLong();
 
-    String usage = "Usage: TestNutchFileSystem -files N -megaBytes M [-noread] [-nowrite] [-noseek] [-fastcheck]";
+    String usage = "Usage: TestFileSystem -files N -megaBytes M [-noread] [-nowrite] [-noseek] [-fastcheck]";
     
     if (args.length == 0) {
         System.err.println(usage);
@@ -401,7 +401,7 @@
     LOG.info("files = " + files);
     LOG.info("megaBytes = " + megaBytes);
   
-    NutchFileSystem fs = NutchFileSystem.get(conf);
+    FileSystem fs = FileSystem.get(conf);
 
     if (!noWrite) {
       createControlFile(fs, megaBytes*MEGA, files, seed);

Modified: lucene/hadoop/trunk/src/test/org/apache/hadoop/io/RandomDatum.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/test/org/apache/hadoop/io/RandomDatum.java?rev=374738&r1=374737&r2=374738&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/test/org/apache/hadoop/io/RandomDatum.java (original)
+++ lucene/hadoop/trunk/src/test/org/apache/hadoop/io/RandomDatum.java Fri Feb  3 12:34:32 2006
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.io.
+package org.apache.hadoop.io;
 
 import java.util.*;
 import java.io.*;

Modified: lucene/hadoop/trunk/src/test/org/apache/hadoop/io/TestArrayFile.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/test/org/apache/hadoop/io/TestArrayFile.java?rev=374738&r1=374737&r2=374738&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/test/org/apache/hadoop/io/TestArrayFile.java (original)
+++ lucene/hadoop/trunk/src/test/org/apache/hadoop/io/TestArrayFile.java Fri Feb  3 12:34:32 2006
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.io.
+package org.apache.hadoop.io;
 
 import java.io.*;
 import java.util.*;
@@ -36,17 +36,17 @@
 
   public void testArrayFile() throws Exception {
       Configuration conf = new Configuration();
-    NutchFileSystem nfs = new LocalFileSystem(conf);
+    FileSystem fs = new LocalFileSystem(conf);
     RandomDatum[] data = generate(10000);
-    writeTest(nfs, data, FILE);
-    readTest(nfs, data, FILE, conf);
+    writeTest(fs, data, FILE);
+    readTest(fs, data, FILE, conf);
   }
 
   public void testEmptyFile() throws Exception {
     Configuration conf = new Configuration();
-    NutchFileSystem nfs = new LocalFileSystem(conf);
-    writeTest(nfs, new RandomDatum[0], FILE);
-    ArrayFile.Reader reader = new ArrayFile.Reader(nfs, FILE, conf);
+    FileSystem fs = new LocalFileSystem(conf);
+    writeTest(fs, new RandomDatum[0], FILE);
+    ArrayFile.Reader reader = new ArrayFile.Reader(fs, FILE, conf);
     assertNull(reader.get(0, new RandomDatum()));
     reader.close();
   }
@@ -62,22 +62,22 @@
     return data;
   }
 
-  private static void writeTest(NutchFileSystem nfs, RandomDatum[] data, String file)
+  private static void writeTest(FileSystem fs, RandomDatum[] data, String file)
     throws IOException {
-    MapFile.delete(nfs, file);
+    MapFile.delete(fs, file);
     LOG.fine("creating with " + data.length + " records");
-    ArrayFile.Writer writer = new ArrayFile.Writer(nfs, file, RandomDatum.class);
+    ArrayFile.Writer writer = new ArrayFile.Writer(fs, file, RandomDatum.class);
     writer.setIndexInterval(100);
     for (int i = 0; i < data.length; i++)
       writer.append(data[i]);
     writer.close();
   }
 
-  private static void readTest(NutchFileSystem nfs, RandomDatum[] data, String file, Configuration conf)
+  private static void readTest(FileSystem fs, RandomDatum[] data, String file, Configuration conf)
     throws IOException {
     RandomDatum v = new RandomDatum();
     LOG.fine("reading " + data.length + " records");
-    ArrayFile.Reader reader = new ArrayFile.Reader(nfs, file, conf);
+    ArrayFile.Reader reader = new ArrayFile.Reader(fs, file, conf);
     for (int i = 0; i < data.length; i++) {       // try forwards
       reader.get(i, v);
       if (!v.equals(data[i])) {
@@ -110,7 +110,7 @@
 
     Configuration conf = new Configuration();
     int i = 0;
-    NutchFileSystem nfs = NutchFileSystem.parseArgs(args, i, conf);
+    FileSystem fs = FileSystem.parseArgs(args, i, conf);
     try {
         for (; i < args.length; i++) {       // parse command line
             if (args[i] == null) {
@@ -137,14 +137,14 @@
         RandomDatum[] data = generate(count);
 
         if (create) {
-            writeTest(nfs, data, file);
+            writeTest(fs, data, file);
         }
 
         if (check) {
-            readTest(nfs, data, file, conf);
+            readTest(fs, data, file, conf);
         }
     } finally {
-        nfs.close();
+        fs.close();
     }
   }
 }

Modified: lucene/hadoop/trunk/src/test/org/apache/hadoop/io/TestMD5Hash.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/test/org/apache/hadoop/io/TestMD5Hash.java?rev=374738&r1=374737&r2=374738&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/test/org/apache/hadoop/io/TestMD5Hash.java (original)
+++ lucene/hadoop/trunk/src/test/org/apache/hadoop/io/TestMD5Hash.java Fri Feb  3 12:34:32 2006
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.io.
+package org.apache.hadoop.io;
 
 import org.apache.hadoop.io.TestWritable;
 import junit.framework.TestCase;