Posted to common-commits@hadoop.apache.org by cu...@apache.org on 2006/02/03 20:45:51 UTC

svn commit: r374733 [3/4] - in /lucene/hadoop/trunk: ./ bin/ conf/ lib/ lib/jetty-ext/ src/java/ src/java/org/ src/java/org/apache/ src/java/org/apache/hadoop/ src/java/org/apache/hadoop/conf/ src/java/org/apache/hadoop/dfs/ src/java/org/apache/hadoop/...

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/io/WritableName.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/io/WritableName.java?rev=374733&r1=374710&r2=374733&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/io/WritableName.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/io/WritableName.java Fri Feb  3 11:45:32 2006
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-package org.apache.nutch.io;
+package org.apache.hadoop.io;
 
 import java.util.HashMap;
 import java.io.IOException;
@@ -32,11 +32,6 @@
     WritableName.setName(LongWritable.class, "long");
     WritableName.setName(UTF8.class, "UTF8");
     WritableName.setName(MD5Hash.class, "MD5Hash");
-    WritableName.setName
-      (org.apache.nutch.fetcher.FetcherOutput.class, "FetcherOutput");
-    WritableName.setName(org.apache.nutch.protocol.Content.class, "Content");
-    WritableName.setName(org.apache.nutch.parse.ParseText.class, "ParseText");
-    WritableName.setName(org.apache.nutch.parse.ParseData.class, "ParseData");
   }
 
   private WritableName() {}                      // no public ctor

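This hunk drops the Nutch-specific registrations (FetcherOutput, Content, ParseText, ParseData) and keeps only the core io types. For orientation, a minimal sketch of how code outside the core can keep using the same registration mechanism after the rename; MyRecord and RegisterWritableNames are invented for the example, and it assumes the post-rename org.apache.hadoop.io classes are on the classpath.

    import java.io.DataInput;
    import java.io.DataOutput;
    import java.io.IOException;

    import org.apache.hadoop.io.Writable;
    import org.apache.hadoop.io.WritableName;

    public class RegisterWritableNames {

      /** MyRecord is an invented Writable, here only to show the registration call. */
      public static class MyRecord implements Writable {
        private long id;

        public void write(DataOutput out) throws IOException {
          out.writeLong(id);                     // serialize fields in a fixed order
        }

        public void readFields(DataInput in) throws IOException {
          id = in.readLong();                    // deserialize in the same order
        }
      }

      public static void main(String[] args) {
        // Same mechanism the static block above uses for the built-in types.
        WritableName.setName(MyRecord.class, "MyRecord");
      }
    }
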
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/io/WritableUtils.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/io/WritableUtils.java?rev=374733&r1=374710&r2=374733&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/io/WritableUtils.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/io/WritableUtils.java Fri Feb  3 11:45:32 2006
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-package org.apache.nutch.io;
+package org.apache.hadoop.io;
 
 import java.io.*;
 

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/ipc/Client.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/ipc/Client.java?rev=374733&r1=374710&r2=374733&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/ipc/Client.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/ipc/Client.java Fri Feb  3 11:45:32 2006
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-package org.apache.nutch.ipc;
+package org.apache.hadoop.ipc;
 
 import java.net.Socket;
 import java.net.InetSocketAddress;
@@ -33,11 +33,11 @@
 import java.util.logging.Logger;
 import java.util.logging.Level;
 
-import org.apache.nutch.util.LogFormatter;
-import org.apache.nutch.util.NutchConf;
-import org.apache.nutch.util.NutchConfigurable;
-import org.apache.nutch.io.Writable;
-import org.apache.nutch.io.UTF8;
+import org.apache.hadoop.util.LogFormatter;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.conf.Configurable;
+import org.apache.hadoop.io.Writable;
+import org.apache.hadoop.io.UTF8;
 
 /** A client for an IPC service.  IPC calls take a single {@link Writable} as a
  * parameter, and return a {@link Writable} as their value.  A service runs on
@@ -48,7 +48,7 @@
  */
 public class Client {
   public static final Logger LOG =
-    LogFormatter.getLogger("org.apache.nutch.ipc.Client");
+    LogFormatter.getLogger("org.apache.hadoop.ipc.Client");
 
   private Hashtable connections = new Hashtable();
 
@@ -56,7 +56,7 @@
   private int timeout ;// timeout for calls
   private int counter;                            // counter for call ids
   private boolean running = true;                 // true while client runs
-  private NutchConf nutchConf;
+  private Configuration conf;
 
   /** A call waiting for a value. */
   private class Call {
@@ -162,8 +162,8 @@
             Writable value = makeValue();
             try {
               readingCall = call;
-              if(value instanceof NutchConfigurable) {
-                ((NutchConfigurable) value).setConf(nutchConf);
+              if(value instanceof Configurable) {
+                ((Configurable) value).setConf(conf);
               }
               value.readFields(in);                 // read value
             } finally {
@@ -261,10 +261,10 @@
 
   /** Construct an IPC client whose values are of the given {@link Writable}
    * class. */
-  public Client(Class valueClass, NutchConf nutchConf) {
+  public Client(Class valueClass, Configuration conf) {
     this.valueClass = valueClass;
-    this.timeout = nutchConf.getInt("ipc.client.timeout",10000);
-    this.nutchConf = nutchConf;
+    this.timeout = conf.getInt("ipc.client.timeout",10000);
+    this.conf = conf;
   }
 
   /** Stop all threads related to this client.  No further calls may be made

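A minimal construction sketch for the renamed client, assuming the post-rename tree is on the classpath. The stop() name is inferred from the truncated "Stop all threads" javadoc above and is an assumption.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.io.UTF8;
    import org.apache.hadoop.ipc.Client;

    public class ClientSetup {
      public static void main(String[] args) {
        Configuration conf = new Configuration();

        // The constructor above reads its call timeout from "ipc.client.timeout"
        // (default 10000 ms) via conf.getInt(...).
        Client client = new Client(UTF8.class, conf);

        // Returned values that implement Configurable get setConf(conf) called on
        // them before readFields(), as the reading loop above shows.

        client.stop();   // assumption: the "stop all threads" method hinted at in
                         // the truncated javadoc above is named stop()
      }
    }
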
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/ipc/RPC.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/ipc/RPC.java?rev=374733&r1=374710&r2=374733&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/ipc/RPC.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/ipc/RPC.java Fri Feb  3 11:45:32 2006
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-package org.apache.nutch.ipc;
+package org.apache.hadoop.ipc;
 
 import java.lang.reflect.Proxy;
 import java.lang.reflect.Method;
@@ -27,8 +27,9 @@
 import java.io.*;
 import java.util.*;
 
-import org.apache.nutch.io.*;
-import org.apache.nutch.util.*;
+import org.apache.hadoop.io.*;
+import org.apache.hadoop.conf.*;
+import org.apache.hadoop.util.LogFormatter;
 
 /** A simple RPC mechanism.
  *
@@ -50,17 +51,17 @@
  */
 public class RPC {
   private static final Logger LOG =
-    LogFormatter.getLogger("org.apache.nutch.ipc.RPC");
+    LogFormatter.getLogger("org.apache.hadoop.ipc.RPC");
 
   private RPC() {}                                  // no public ctor
 
 
   /** A method invocation, including the method name and its parameters.*/
-  private static class Invocation implements Writable, NutchConfigurable {
+  private static class Invocation implements Writable, Configurable {
     private String methodName;
     private Class[] parameterClasses;
     private Object[] parameters;
-    private NutchConf nutchConf;
+    private Configuration conf;
 
     public Invocation() {}
 
@@ -85,7 +86,7 @@
       parameterClasses = new Class[parameters.length];
       ObjectWritable objectWritable = new ObjectWritable();
       for (int i = 0; i < parameters.length; i++) {
-        parameters[i] = ObjectWritable.readObject(in, objectWritable, this.nutchConf);
+        parameters[i] = ObjectWritable.readObject(in, objectWritable, this.conf);
         parameterClasses[i] = objectWritable.getDeclaredClass();
       }
     }
@@ -111,12 +112,12 @@
       return buffer.toString();
     }
 
-    public void setConf(NutchConf conf) {
-      this.nutchConf = conf;
+    public void setConf(Configuration conf) {
+      this.conf = conf;
     }
 
-    public NutchConf getConf() {
-      return this.nutchConf;
+    public Configuration getConf() {
+      return this.conf;
     }
 
   }
@@ -127,12 +128,12 @@
   private static class Invoker implements InvocationHandler {
     private InetSocketAddress address;
 
-    public Invoker(InetSocketAddress address, NutchConf nutchConf) {
+    public Invoker(InetSocketAddress address, Configuration conf) {
       this.address = address;
-      CLIENT = (Client) nutchConf.getObject(Client.class.getName());
+      CLIENT = (Client) conf.getObject(Client.class.getName());
       if(CLIENT == null) {
-          CLIENT = new Client(ObjectWritable.class, nutchConf);
-          nutchConf.setObject(Client.class.getName(), CLIENT);
+          CLIENT = new Client(ObjectWritable.class, conf);
+          conf.setObject(Client.class.getName(), CLIENT);
       }
     }
 
@@ -146,24 +147,24 @@
 
   /** Construct a client-side proxy object that implements the named protocol,
    * talking to a server at the named address. */
-  public static Object getProxy(Class protocol, InetSocketAddress addr, NutchConf nutchConf) {
+  public static Object getProxy(Class protocol, InetSocketAddress addr, Configuration conf) {
     return Proxy.newProxyInstance(protocol.getClassLoader(),
                                   new Class[] { protocol },
-                                  new Invoker(addr, nutchConf));
+                                  new Invoker(addr, conf));
   }
 
   /** Expert: Make multiple, parallel calls to a set of servers. */
   public static Object[] call(Method method, Object[][] params,
-                              InetSocketAddress[] addrs, NutchConf nutchConf)
+                              InetSocketAddress[] addrs, Configuration conf)
     throws IOException {
 
     Invocation[] invocations = new Invocation[params.length];
     for (int i = 0; i < params.length; i++)
       invocations[i] = new Invocation(method, params[i]);
-    CLIENT = (Client) nutchConf.getObject(Client.class.getName());
+    CLIENT = (Client) conf.getObject(Client.class.getName());
     if(CLIENT == null) {
-        CLIENT = new Client(ObjectWritable.class, nutchConf);
-        nutchConf.setObject(Client.class.getName(), CLIENT);
+        CLIENT = new Client(ObjectWritable.class, conf);
+        conf.setObject(Client.class.getName(), CLIENT);
     }
     Writable[] wrappedValues = CLIENT.call(invocations, addrs);
     
@@ -183,16 +184,16 @@
 
   /** Construct a server for a protocol implementation instance listening on a
    * port. */
-  public static Server getServer(final Object instance, final int port, NutchConf nutchConf) {
-    return getServer(instance, port, 1, false, nutchConf);
+  public static Server getServer(final Object instance, final int port, Configuration conf) {
+    return getServer(instance, port, 1, false, conf);
   }
 
   /** Construct a server for a protocol implementation instance listening on a
    * port. */
   public static Server getServer(final Object instance, final int port,
                                  final int numHandlers,
-                                 final boolean verbose, NutchConf nutchConf) {
-    return new Server(port, Invocation.class, numHandlers, nutchConf) {
+                                 final boolean verbose, Configuration conf) {
+    return new Server(port, Invocation.class, numHandlers, conf) {
         
         Class implementation = instance.getClass();
 

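A rough end-to-end sketch of the getProxy/getServer pair shown above. EchoProtocol, EchoImpl, and the port are invented for illustration, and the server.start() call is an assumption (the lifecycle methods are not shown in this hunk).

    import java.net.InetSocketAddress;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.io.UTF8;
    import org.apache.hadoop.ipc.RPC;
    import org.apache.hadoop.ipc.Server;

    public class RpcSketch {

      /** EchoProtocol and EchoImpl are invented for this sketch. */
      public interface EchoProtocol {
        UTF8 echo(UTF8 message);
      }

      public static class EchoImpl implements EchoProtocol {
        public UTF8 echo(UTF8 message) {
          return message;                                // trivially echo the argument
        }
      }

      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        int port = 9000;                                 // arbitrary example port

        // Server side: getServer(instance, port, conf) wraps the implementation object.
        Server server = RPC.getServer(new EchoImpl(), port, conf);
        server.start();                                  // assumption: Server has a start() method

        // Client side: each call through the proxy is shipped as an Invocation.
        InetSocketAddress addr = new InetSocketAddress("localhost", port);
        EchoProtocol proxy = (EchoProtocol) RPC.getProxy(EchoProtocol.class, addr, conf);
        UTF8 reply = proxy.echo(new UTF8("hello"));
        System.out.println(reply);
      }
    }
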
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/ipc/Server.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/ipc/Server.java?rev=374733&r1=374710&r2=374733&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/ipc/Server.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/ipc/Server.java Fri Feb  3 11:45:32 2006
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-package org.apache.nutch.ipc;
+package org.apache.hadoop.ipc;
 
 import java.io.IOException;
 import java.io.EOFException;
@@ -32,10 +32,10 @@
 import java.util.logging.Logger;
 import java.util.logging.Level;
 
-import org.apache.nutch.util.LogFormatter;
-import org.apache.nutch.util.NutchConf;
-import org.apache.nutch.io.Writable;
-import org.apache.nutch.io.UTF8;
+import org.apache.hadoop.util.LogFormatter;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.io.Writable;
+import org.apache.hadoop.io.UTF8;
 
 /** An abstract IPC service.  IPC calls take a single {@link Writable} as a
  * parameter, and return a {@link Writable} as their value.  A service runs on
@@ -46,7 +46,7 @@
  */
 public abstract class Server {
   public static final Logger LOG =
-    LogFormatter.getLogger("org.apache.nutch.ipc.Server");
+    LogFormatter.getLogger("org.apache.hadoop.ipc.Server");
 
   private int port;                               // port we listen on
   private int handlerCount;                       // number of handler threads
@@ -228,12 +228,12 @@
    * be of the named class.  The <code>handlerCount</handlerCount> determines
    * the number of handler threads that will be used to process calls.
    */
-  protected Server(int port, Class paramClass, int handlerCount, NutchConf nutchConf) {
+  protected Server(int port, Class paramClass, int handlerCount, Configuration conf) {
     this.port = port;
     this.paramClass = paramClass;
     this.handlerCount = handlerCount;
     this.maxQueuedCalls = handlerCount;
-    this.timeout = nutchConf.getInt("ipc.client.timeout",10000); 
+    this.timeout = conf.getInt("ipc.client.timeout",10000); 
   }
 
   /** Sets the timeout used for network i/o. */

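Both ends of the IPC layer read the same timeout key with the same default, so a deployment only has to set "ipc.client.timeout" once. A trivial sketch of reading it the way the two constructors above do:

    import org.apache.hadoop.conf.Configuration;

    public class IpcTimeoutSketch {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        // Same lookup Client and Server perform in their constructors.
        int timeoutMillis = conf.getInt("ipc.client.timeout", 10000);
        System.out.println("ipc.client.timeout = " + timeoutMillis + " ms");
      }
    }
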
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/CombiningCollector.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/CombiningCollector.java?rev=374733&r1=374710&r2=374733&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/CombiningCollector.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/CombiningCollector.java Fri Feb  3 11:45:32 2006
@@ -14,14 +14,14 @@
  * limitations under the License.
  */
 
-package org.apache.nutch.mapred;
+package org.apache.hadoop.mapred;
 
 import java.io.*;
 import java.net.*;
 import java.util.*;
 
-import org.apache.nutch.io.*;
-import org.apache.nutch.util.*;
+import org.apache.hadoop.io.*;
+import org.apache.hadoop.conf.*;
 
 /** Implements partial value reduction during mapping.  This can minimize the
  * size of intermediate data.  Buffers a list of values for each unique key,

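The class described above buffers map outputs per key and reduces them before they leave the mapper. The diff only shows the header, so as a generic illustration of the idea (not this class's actual code), here is a stand-alone sketch that buffers values per key and combines them with a hard-coded sum in place of the job's combiner:

    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    public class PartialReductionSketch {
      // key -> buffered values, mirroring the "list of values for each unique key" above
      private final Map<String, List<Long>> buffer = new HashMap<String, List<Long>>();

      /** Collect one map output, buffering it under its key. */
      public void collect(String key, long value) {
        List<Long> values = buffer.get(key);
        if (values == null) {
          values = new ArrayList<Long>();
          buffer.put(key, values);
        }
        values.add(Long.valueOf(value));
      }

      /** Flush: combine each key's buffered values into one (here, a sum). */
      public Map<String, Long> flush() {
        Map<String, Long> combined = new HashMap<String, Long>();
        for (Map.Entry<String, List<Long>> e : buffer.entrySet()) {
          long sum = 0;
          for (Long v : e.getValue()) {
            sum += v.longValue();
          }
          combined.put(e.getKey(), Long.valueOf(sum));   // one value per key leaves the mapper
        }
        buffer.clear();
        return combined;
      }
    }
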
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/FileSplit.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/FileSplit.java?rev=374733&r1=374710&r2=374733&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/FileSplit.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/FileSplit.java Fri Feb  3 11:45:32 2006
@@ -14,16 +14,16 @@
  * limitations under the License.
  */
 
-package org.apache.nutch.mapred;
+package org.apache.hadoop.mapred;
 
 import java.io.IOException;
 import java.io.File;
 import java.io.DataInput;
 import java.io.DataOutput;
 
-import org.apache.nutch.io.Writable;
-import org.apache.nutch.io.UTF8;
-import org.apache.nutch.fs.NutchFileSystem;
+import org.apache.hadoop.io.Writable;
+import org.apache.hadoop.io.UTF8;
+import org.apache.hadoop.fs.NutchFileSystem;
 
 /** A section of an input file.  Returned by {@link
  * InputFormat#getSplits(NutchFileSystem, JobConf, int)} and passed to

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/InputFormat.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/InputFormat.java?rev=374733&r1=374710&r2=374733&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/InputFormat.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/InputFormat.java Fri Feb  3 11:45:32 2006
@@ -14,12 +14,12 @@
  * limitations under the License.
  */
 
-package org.apache.nutch.mapred;
+package org.apache.hadoop.mapred;
 
 import java.io.IOException;
 import java.io.File;
 
-import org.apache.nutch.fs.NutchFileSystem;
+import org.apache.hadoop.fs.NutchFileSystem;
 
 /** An input data format.  Input files are stored in a {@link NutchFileSystem}.
  * The processing of an input file may be split across multiple machines.

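A short sketch of the getSplits contract referenced by the FileSplit and InputFormat javadoc above, mirroring the call JobInProgress.initTasks() makes later in this diff. It assumes the job's input paths are already configured (those setters are not shown in this part of the diff); numMapTasks is an arbitrary example value.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.NutchFileSystem;
    import org.apache.hadoop.mapred.FileSplit;
    import org.apache.hadoop.mapred.InputFormat;
    import org.apache.hadoop.mapred.JobConf;

    public class SplitSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        JobConf job = new JobConf(conf);                  // assumes the job's input is already configured
        NutchFileSystem fs = NutchFileSystem.get(conf);   // same call JobInProgress uses further down

        // Mirrors JobInProgress.initTasks(): ask the job's InputFormat to carve
        // the input into per-map splits.
        int numMapTasks = 4;                              // arbitrary example value
        InputFormat format = job.getInputFormat();
        FileSplit[] splits = format.getSplits(fs, job, numMapTasks);
        System.out.println("splits: " + splits.length);
      }
    }
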
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/InputFormatBase.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/InputFormatBase.java?rev=374733&r1=374710&r2=374733&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/InputFormatBase.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/InputFormatBase.java Fri Feb  3 11:45:32 2006
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-package org.apache.nutch.mapred;
+package org.apache.hadoop.mapred;
 
 import java.io.IOException;
 import java.io.File;
@@ -23,14 +23,14 @@
 import java.util.ArrayList;
 import java.util.logging.Logger;
 
-import org.apache.nutch.fs.NutchFileSystem;
-import org.apache.nutch.util.LogFormatter;
+import org.apache.hadoop.fs.NutchFileSystem;
+import org.apache.hadoop.util.LogFormatter;
 
 /** A base class for {@link InputFormat}. */
 public abstract class InputFormatBase implements InputFormat {
 
   public static final Logger LOG =
-    LogFormatter.getLogger("org.apache.nutch.mapred.InputFormatBase");
+    LogFormatter.getLogger("org.apache.hadoop.mapred.InputFormatBase");
 
   private static final double SPLIT_SLOP = 0.1;   // 10% slop
 

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/InterTrackerProtocol.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/InterTrackerProtocol.java?rev=374733&r1=374710&r2=374733&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/InterTrackerProtocol.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/InterTrackerProtocol.java Fri Feb  3 11:45:32 2006
@@ -14,10 +14,10 @@
  * limitations under the License.
  */
 
-package org.apache.nutch.mapred;
+package org.apache.hadoop.mapred;
 
 import java.io.*;
-import org.apache.nutch.io.*;
+import org.apache.hadoop.io.*;
 
 /** 
  * Protocol that a TaskTracker and the central JobTracker use to communicate.

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobClient.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobClient.java?rev=374733&r1=374710&r2=374733&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobClient.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobClient.java Fri Feb  3 11:45:32 2006
@@ -13,12 +13,13 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.nutch.mapred;
+package org.apache.hadoop.mapred;
 
-import org.apache.nutch.io.*;
-import org.apache.nutch.fs.*;
-import org.apache.nutch.ipc.*;
-import org.apache.nutch.util.*;
+import org.apache.hadoop.io.*;
+import org.apache.hadoop.fs.*;
+import org.apache.hadoop.ipc.*;
+import org.apache.hadoop.conf.*;
+import org.apache.hadoop.util.LogFormatter;
 
 import java.io.*;
 import java.net.*;
@@ -34,7 +35,7 @@
  * @author Mike Cafarella
  *******************************************************/
 public class JobClient implements MRConstants {
-    private static final Logger LOG = LogFormatter.getLogger("org.apache.nutch.mapred.JobClient");
+    private static final Logger LOG = LogFormatter.getLogger("org.apache.hadoop.mapred.JobClient");
 
     static long MAX_JOBPROFILE_AGE = 1000 * 2;
 
@@ -166,14 +167,14 @@
     JobSubmissionProtocol jobSubmitClient;
     NutchFileSystem fs = null;
 
-    private NutchConf nutchConf;
+    private Configuration conf;
     static Random r = new Random();
 
     /**
      * Build a job client, connect to the default job tracker
      */
-    public JobClient(NutchConf conf) throws IOException {
-      this.nutchConf = conf;
+    public JobClient(Configuration conf) throws IOException {
+      this.conf = conf;
       String tracker = conf.get("mapred.job.tracker", "local");
       if ("local".equals(tracker)) {
         this.jobSubmitClient = new LocalJobRunner(conf);
@@ -187,9 +188,9 @@
     /**
      * Build a job client, connect to the indicated job tracker.
      */
-    public JobClient(InetSocketAddress jobTrackAddr, NutchConf nutchConf) throws IOException {
+    public JobClient(InetSocketAddress jobTrackAddr, Configuration conf) throws IOException {
         this.jobSubmitClient = (JobSubmissionProtocol) 
-            RPC.getProxy(JobSubmissionProtocol.class, jobTrackAddr, nutchConf);
+            RPC.getProxy(JobSubmissionProtocol.class, jobTrackAddr, conf);
     }
 
 
@@ -209,7 +210,7 @@
     public synchronized NutchFileSystem getFs() throws IOException {
       if (this.fs == null) {
         String fsName = jobSubmitClient.getFilesystemName();
-        this.fs = NutchFileSystem.getNamed(fsName, this.nutchConf);
+        this.fs = NutchFileSystem.getNamed(fsName, this.conf);
       }
       return fs;
     }
@@ -229,7 +230,7 @@
     public RunningJob submitJob(JobConf job) throws IOException {
         //
         // First figure out what fs the JobTracker is using.  Copy the
-        // job to it, under a temporary name.  This allows NDFS to work,
+        // job to it, under a temporary name.  This allows DFS to work,
         // and under the local fs also provides UNIX-like object loading 
         // semantics.  (that is, if the job file is deleted right after
         // submission, we can still run the submission to completion)
@@ -347,7 +348,7 @@
         }
 
         // Submit the request
-        JobClient jc = new JobClient(new NutchConf());
+        JobClient jc = new JobClient(new Configuration());
         try {
             if (submitJobFile != null) {
                 RunningJob job = jc.submitJob(submitJobFile);

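A minimal submission sketch using only the calls visible in this hunk: the JobClient(Configuration) constructor, the "mapred.job.tracker" default of "local", and submitJob(JobConf) returning a RunningJob. Job setup details are omitted because those setters are not shown here.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.mapred.JobClient;
    import org.apache.hadoop.mapred.JobConf;
    import org.apache.hadoop.mapred.RunningJob;

    public class SubmitSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();

        // With "mapred.job.tracker" left at its "local" default, the constructor
        // above wires the client to the in-process LocalJobRunner.
        JobClient jc = new JobClient(conf);

        JobConf job = new JobConf(conf);   // mapper/input/output setup omitted; those
                                           // setters are not shown in this part of the diff
        RunningJob running = jc.submitJob(job);
      }
    }
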
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobConf.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobConf.java?rev=374733&r1=374710&r2=374733&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobConf.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobConf.java Fri Feb  3 11:45:32 2006
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-package org.apache.nutch.mapred;
+package org.apache.hadoop.mapred;
 
 
 import java.io.IOException;
@@ -31,25 +31,25 @@
 import java.util.List;
 import java.util.Collections;
 
-import org.apache.nutch.fs.NutchFileSystem;
-import org.apache.nutch.fs.FileUtil;
-import org.apache.nutch.util.NutchConf;
-
-import org.apache.nutch.io.Writable;
-import org.apache.nutch.io.WritableComparable;
-import org.apache.nutch.io.WritableComparator;
-import org.apache.nutch.io.LongWritable;
-import org.apache.nutch.io.UTF8;
-
-import org.apache.nutch.mapred.lib.IdentityMapper;
-import org.apache.nutch.mapred.lib.IdentityReducer;
-import org.apache.nutch.mapred.lib.HashPartitioner;
+import org.apache.hadoop.fs.NutchFileSystem;
+import org.apache.hadoop.fs.FileUtil;
+import org.apache.hadoop.conf.Configuration;
+
+import org.apache.hadoop.io.Writable;
+import org.apache.hadoop.io.WritableComparable;
+import org.apache.hadoop.io.WritableComparator;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.UTF8;
+
+import org.apache.hadoop.mapred.lib.IdentityMapper;
+import org.apache.hadoop.mapred.lib.IdentityReducer;
+import org.apache.hadoop.mapred.lib.HashPartitioner;
 
 /** A map/reduce job configuration.  This names the {@link Mapper}, combiner
  * (if any), {@link Partitioner}, {@link Reducer}, {@link InputFormat}, and
  * {@link OutputFormat} implementations to be used.  It also indicates the set
  * of input files, and where the output files should be written. */
-public class JobConf extends NutchConf {
+public class JobConf extends Configuration {
 
   public JobConf() {
     super();
@@ -59,9 +59,9 @@
    * Construct a map/reduce job configuration.
    * 
    * @param conf
-   *          a NutchConf whose settings will be inherited.
+   *          a Configuration whose settings will be inherited.
    */
-  public JobConf(NutchConf conf) {
+  public JobConf(Configuration conf) {
     super(conf);
     addConfResource("mapred-default.xml");
   }
@@ -69,7 +69,7 @@
 
   /** Construct a map/reduce configuration.
    *
-   * @param config a NutchConf-format XML job description file
+   * @param config a Configuration-format XML job description file
    */
   public JobConf(String config) {
     this(new File(config));
@@ -77,7 +77,7 @@
 
   /** Construct a map/reduce configuration.
    *
-   * @param config a NutchConf-format XML job description file
+   * @param config a Configuration-format XML job description file
    */
   public JobConf(File config) {
     super();

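Since JobConf now extends Configuration rather than NutchConf, a job configuration can be handed to any API that asks for a Configuration. A small sketch, assuming the post-rename tree on the classpath:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.NutchFileSystem;
    import org.apache.hadoop.mapred.JobConf;

    public class JobConfSketch {
      public static void main(String[] args) throws Exception {
        Configuration base = new Configuration();

        // JobConf(Configuration) inherits the parent settings and then layers
        // mapred-default.xml on top, per the constructor above.
        JobConf job = new JobConf(base);

        // Because JobConf is a Configuration, it can be passed to any API that
        // expects one:
        NutchFileSystem fs = NutchFileSystem.get(job);
        int timeout = job.getInt("ipc.client.timeout", 10000);   // inherited getter
      }
    }
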
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobConfigurable.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobConfigurable.java?rev=374733&r1=374710&r2=374733&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobConfigurable.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobConfigurable.java Fri Feb  3 11:45:32 2006
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-package org.apache.nutch.mapred;
+package org.apache.hadoop.mapred;
 
 /** That what may be configured. */
 public interface JobConfigurable {

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobInProgress.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobInProgress.java?rev=374733&r1=374710&r2=374733&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobInProgress.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobInProgress.java Fri Feb  3 11:45:32 2006
@@ -13,12 +13,13 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.nutch.mapred;
+package org.apache.hadoop.mapred;
 
-import org.apache.nutch.io.*;
-import org.apache.nutch.fs.*;
-import org.apache.nutch.ipc.*;
-import org.apache.nutch.util.*;
+import org.apache.hadoop.io.*;
+import org.apache.hadoop.fs.*;
+import org.apache.hadoop.ipc.*;
+import org.apache.hadoop.conf.*;
+import org.apache.hadoop.util.LogFormatter;
 
 import java.io.*;
 import java.net.*;
@@ -32,7 +33,7 @@
 // doing bookkeeping of its Tasks.
 ///////////////////////////////////////////////////////
 public class JobInProgress {
-    public static final Logger LOG = LogFormatter.getLogger("org.apache.nutch.mapred.JobInProgress");
+    public static final Logger LOG = LogFormatter.getLogger("org.apache.hadoop.mapred.JobInProgress");
 
     JobProfile profile;
     JobStatus status;
@@ -49,24 +50,24 @@
     long finishTime;
     String deleteUponCompletion = null;
 
-    NutchConf nutchConf;
+    Configuration conf;
     boolean tasksInited = false;
 
     /**
      * Create a JobInProgress with the given job file, plus a handle
      * to the tracker.
      */
-    public JobInProgress(String jobFile, JobTracker jobtracker, NutchConf nutchConf) throws IOException {
+    public JobInProgress(String jobFile, JobTracker jobtracker, Configuration conf) throws IOException {
         String jobid = "job_" + jobtracker.createUniqueId();
         String url = "http://" + jobtracker.getJobTrackerMachine() + ":" + jobtracker.getInfoPort() + "/jobdetails.jsp?jobid=" + jobid;
-        this.nutchConf = nutchConf;
+        this.conf = conf;
         this.jobtracker = jobtracker;
         this.profile = new JobProfile(jobid, jobFile, url);
         this.status = new JobStatus(jobid, 0.0f, 0.0f, JobStatus.RUNNING);
         this.startTime = System.currentTimeMillis();
 
-        this.localJobFile = new JobConf(nutchConf).getLocalFile(JobTracker.SUBDIR, jobid + ".xml");
-        NutchFileSystem fs = NutchFileSystem.get(nutchConf);
+        this.localJobFile = new JobConf(conf).getLocalFile(JobTracker.SUBDIR, jobid + ".xml");
+        NutchFileSystem fs = NutchFileSystem.get(conf);
         fs.copyToLocalFile(new File(jobFile), localJobFile);
 
         JobConf jd = new JobConf(localJobFile);
@@ -98,7 +99,7 @@
         String jobFile = profile.getJobFile();
 
         JobConf jd = new JobConf(localJobFile);
-        NutchFileSystem fs = NutchFileSystem.get(nutchConf);
+        NutchFileSystem fs = NutchFileSystem.get(conf);
         FileSplit[] splits =
             jd.getInputFormat().getSplits(fs, jd, numMapTasks);
 
@@ -120,7 +121,7 @@
         // create a map task for each split
         this.maps = new TaskInProgress[numMapTasks];
         for (int i = 0; i < numMapTasks; i++) {
-            maps[i] = new TaskInProgress(jobFile, splits[i], jobtracker, nutchConf, this);
+            maps[i] = new TaskInProgress(jobFile, splits[i], jobtracker, conf, this);
         }
 
         //
@@ -128,7 +129,7 @@
         //
         this.reduces = new TaskInProgress[numReduceTasks];
         for (int i = 0; i < numReduceTasks; i++) {
-            reduces[i] = new TaskInProgress(jobFile, maps, i, jobtracker, nutchConf, this);
+            reduces[i] = new TaskInProgress(jobFile, maps, i, jobtracker, conf, this);
         }
 
         tasksInited = true;
@@ -426,7 +427,7 @@
         //
         if (deleteUponCompletion != null) {
             JobConf jd = new JobConf(deleteUponCompletion);
-            NutchFileSystem fs = NutchFileSystem.get(nutchConf);
+            NutchFileSystem fs = NutchFileSystem.get(conf);
             fs.delete(new File(jd.getJar()));
             fs.delete(new File(deleteUponCompletion));
             deleteUponCompletion = null;

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobProfile.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobProfile.java?rev=374733&r1=374710&r2=374733&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobProfile.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobProfile.java Fri Feb  3 11:45:32 2006
@@ -13,9 +13,9 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.nutch.mapred;
+package org.apache.hadoop.mapred;
 
-import org.apache.nutch.io.*;
+import org.apache.hadoop.io.*;
 
 import java.io.*;
 import java.net.*;

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobStatus.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobStatus.java?rev=374733&r1=374710&r2=374733&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobStatus.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobStatus.java Fri Feb  3 11:45:32 2006
@@ -13,9 +13,9 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.nutch.mapred;
+package org.apache.hadoop.mapred;
 
-import org.apache.nutch.io.*;
+import org.apache.hadoop.io.*;
 
 import java.io.*;
 import java.net.*;

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobSubmissionProtocol.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobSubmissionProtocol.java?rev=374733&r1=374710&r2=374733&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobSubmissionProtocol.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobSubmissionProtocol.java Fri Feb  3 11:45:32 2006
@@ -14,11 +14,11 @@
  * limitations under the License.
  */
 
-package org.apache.nutch.mapred;
+package org.apache.hadoop.mapred;
 
 import java.io.*;
 import java.util.*;
-import org.apache.nutch.io.*;
+import org.apache.hadoop.io.*;
 
 /** 
  * Protocol that a JobClient and the central JobTracker use to communicate.  The
@@ -56,7 +56,7 @@
     /**
      * A MapReduce system always operates on a single filesystem.  This 
      * function returns the fs name.  ('local' if the localfs; 'addr:port' 
-     * if ndfs).  The client can then copy files into the right locations 
+     * if dfs).  The client can then copy files into the right locations 
      * prior to submitting the job.
      */
     public String getFilesystemName() throws IOException;

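The getFilesystemName() contract described above is what lets a client copy files to wherever the tracker runs. A sketch of the two steps JobClient.getFs() performs, using the in-process LocalJobRunner as the protocol implementation:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.NutchFileSystem;
    import org.apache.hadoop.mapred.JobSubmissionProtocol;
    import org.apache.hadoop.mapred.LocalJobRunner;

    public class SharedFsSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();

        // Any implementation of the protocol will do; LocalJobRunner is the
        // in-process one, an RPC proxy (as in JobClient) is the remote one.
        JobSubmissionProtocol tracker = new LocalJobRunner(conf);

        // Ask the tracker which filesystem it operates on ("local" or
        // "addr:port"), then open it by name.
        String fsName = tracker.getFilesystemName();
        NutchFileSystem fs = NutchFileSystem.getNamed(fsName, conf);
      }
    }
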
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobTracker.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobTracker.java?rev=374733&r1=374710&r2=374733&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobTracker.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobTracker.java Fri Feb  3 11:45:32 2006
@@ -13,13 +13,14 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.nutch.mapred;
+package org.apache.hadoop.mapred;
 
 
-import org.apache.nutch.io.*;
-import org.apache.nutch.fs.*;
-import org.apache.nutch.ipc.*;
-import org.apache.nutch.util.*;
+import org.apache.hadoop.io.*;
+import org.apache.hadoop.fs.*;
+import org.apache.hadoop.ipc.*;
+import org.apache.hadoop.conf.*;
+import org.apache.hadoop.util.LogFormatter;
 
 import java.io.*;
 import java.net.*;
@@ -39,10 +40,10 @@
     static float PAD_FRACTION;
     static float MIN_SLOTS_FOR_PADDING;
 
-    public static final Logger LOG = LogFormatter.getLogger("org.apache.nutch.mapred.JobTracker");
+    public static final Logger LOG = LogFormatter.getLogger("org.apache.hadoop.mapred.JobTracker");
 
     private static JobTracker tracker = null;
-    public static void startTracker(NutchConf conf) throws IOException {
+    public static void startTracker(Configuration conf) throws IOException {
       if (tracker != null)
         throw new IOException("JobTracker already running.");
       while (true) {
@@ -258,12 +259,12 @@
     static final String SUBDIR = "jobTracker";
     NutchFileSystem fs;
     File systemDir;
-    private NutchConf nutchConf;
+    private Configuration conf;
 
     /**
      * Start the JobTracker process, listen on the indicated port
      */
-    JobTracker(NutchConf conf) throws IOException {
+    JobTracker(Configuration conf) throws IOException {
         //
         // Grab some static constants
         //
@@ -276,7 +277,7 @@
 
         // This is a directory of temporary submission files.  We delete it
         // on startup, and can delete any files that we're done with
-        this.nutchConf = conf;
+        this.conf = conf;
         JobConf jobConf = new JobConf(conf);
         this.systemDir = jobConf.getSystemDir();
         this.fs = NutchFileSystem.get(conf);
@@ -309,7 +310,7 @@
         new Thread(this.retireJobs).start();
     }
 
-    public static InetSocketAddress getAddress(NutchConf conf) {
+    public static InetSocketAddress getAddress(Configuration conf) {
       String jobTrackerStr =
         conf.get("mapred.job.tracker", "localhost:8012");
       int colon = jobTrackerStr.indexOf(":");
@@ -484,7 +485,7 @@
      * a task we'd like the TaskTracker to execute right now.
      *
      * Eventually this function should compute load on the various TaskTrackers,
-     * and incorporate knowledge of NDFS file placement.  But for right now, it
+     * and incorporate knowledge of DFS file placement.  But for right now, it
      * just grabs a single item out of the pending task list and hands it back.
      */
     public synchronized Task pollForNewTask(String taskTracker) {
@@ -738,7 +739,7 @@
      * the JobTracker alone.
      */
     JobInProgress createJob(String jobFile) throws IOException {
-        JobInProgress job = new JobInProgress(jobFile, this, this.nutchConf);
+        JobInProgress job = new JobInProgress(jobFile, this, this.conf);
         jobs.put(job.getProfile().getJobId(), job);
         jobsByArrival.add(job);
         return job;
@@ -802,7 +803,7 @@
 
     /**
      * Start the JobTracker process.  This is used only for debugging.  As a rule,
-     * JobTracker should be run as part of the NDFS Namenode process.
+     * JobTracker should be run as part of the DFS Namenode process.
      */
     public static void main(String argv[]) throws IOException, InterruptedException {
         if (argv.length != 0) {
@@ -810,6 +811,6 @@
           System.exit(-1);
         }
 
-        startTracker(new NutchConf());
+        startTracker(new Configuration());
     }
 }

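A small sketch of the two public static entry points touched above: getAddress(Configuration), which splits "mapred.job.tracker" into host and port, and startTracker(Configuration), the same call main() makes. It assumes the post-rename tree on the classpath.

    import java.net.InetSocketAddress;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.mapred.JobTracker;

    public class TrackerSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();

        // getAddress() splits "mapred.job.tracker" (default "localhost:8012")
        // into host and port, as shown above.
        InetSocketAddress addr = JobTracker.getAddress(conf);
        System.out.println("expecting a job tracker at " + addr);

        // Same entry point main() calls above.
        JobTracker.startTracker(conf);
      }
    }
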
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobTrackerInfoServer.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobTrackerInfoServer.java?rev=374733&r1=374710&r2=374733&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobTrackerInfoServer.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobTrackerInfoServer.java Fri Feb  3 11:45:32 2006
@@ -13,11 +13,11 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.nutch.mapred;
+package org.apache.hadoop.mapred;
 
-import org.apache.nutch.io.*;
-import org.apache.nutch.ipc.*;
-import org.apache.nutch.util.*;
+import org.apache.hadoop.io.*;
+import org.apache.hadoop.ipc.*;
+import org.apache.hadoop.conf.*;
 
 import org.mortbay.util.*;
 import org.mortbay.http.*;

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/LocalJobRunner.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/LocalJobRunner.java?rev=374733&r1=374710&r2=374733&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/LocalJobRunner.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/LocalJobRunner.java Fri Feb  3 11:45:32 2006
@@ -14,24 +14,25 @@
  * limitations under the License.
  */
 
-package org.apache.nutch.mapred;
+package org.apache.hadoop.mapred;
 
 import java.io.*;
 import java.util.*;
 import java.util.logging.*;
 
-import org.apache.nutch.io.*;
-import org.apache.nutch.fs.*;
-import org.apache.nutch.util.*;
+import org.apache.hadoop.io.*;
+import org.apache.hadoop.fs.*;
+import org.apache.hadoop.conf.*;
+import org.apache.hadoop.util.LogFormatter;
 
 /** Implements MapReduce locally, in-process, for debugging. */ 
 public class LocalJobRunner implements JobSubmissionProtocol {
   public static final Logger LOG =
-    LogFormatter.getLogger("org.apache.nutch.mapred.LocalJobRunner");
+    LogFormatter.getLogger("org.apache.hadoop.mapred.LocalJobRunner");
 
   private NutchFileSystem fs;
   private HashMap jobs = new HashMap();
-  private NutchConf nutchConf;
+  private Configuration conf;
 
   private class Job extends Thread
     implements TaskUmbilicalProtocol {
@@ -43,13 +44,13 @@
     private ArrayList mapIds = new ArrayList();
     private MapOutputFile mapoutputFile;
 
-    public Job(String file, NutchConf nutchConf) throws IOException {
+    public Job(String file, Configuration conf) throws IOException {
       this.file = file;
       this.id = "job_" + newId();
       this.mapoutputFile = new MapOutputFile();
-      this.mapoutputFile.setConf(nutchConf);
+      this.mapoutputFile.setConf(conf);
 
-      File localFile = new JobConf(nutchConf).getLocalFile("localRunner", id+".xml");
+      File localFile = new JobConf(conf).getLocalFile("localRunner", id+".xml");
       fs.copyToLocalFile(new File(file), localFile);
       this.job = new JobConf(localFile);
       
@@ -149,15 +150,15 @@
 
   }
 
-  public LocalJobRunner(NutchConf nutchConf) throws IOException {
-    this.fs = NutchFileSystem.get(nutchConf);
-    this.nutchConf = nutchConf;
+  public LocalJobRunner(Configuration conf) throws IOException {
+    this.fs = NutchFileSystem.get(conf);
+    this.conf = conf;
   }
 
   // JobSubmissionProtocol methods
 
   public JobStatus submitJob(String jobFile) throws IOException {
-    return new Job(jobFile, this.nutchConf).status;
+    return new Job(jobFile, this.conf).status;
   }
 
   public void killJob(String id) {

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MRConstants.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MRConstants.java?rev=374733&r1=374710&r2=374733&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MRConstants.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MRConstants.java Fri Feb  3 11:45:32 2006
@@ -13,7 +13,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.nutch.mapred;
+package org.apache.hadoop.mapred;
 
 /*******************************
  * Some handy constants

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MapFileOutputFormat.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MapFileOutputFormat.java?rev=374733&r1=374710&r2=374733&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MapFileOutputFormat.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MapFileOutputFormat.java Fri Feb  3 11:45:32 2006
@@ -14,18 +14,18 @@
  * limitations under the License.
  */
 
-package org.apache.nutch.mapred;
+package org.apache.hadoop.mapred;
 
 import java.io.IOException;
 import java.io.File;
 import java.util.Arrays;
 
-import org.apache.nutch.fs.NutchFileSystem;
+import org.apache.hadoop.fs.NutchFileSystem;
 
-import org.apache.nutch.io.MapFile;
-import org.apache.nutch.io.WritableComparable;
-import org.apache.nutch.io.Writable;
-import org.apache.nutch.util.NutchConf;
+import org.apache.hadoop.io.MapFile;
+import org.apache.hadoop.io.WritableComparable;
+import org.apache.hadoop.io.Writable;
+import org.apache.hadoop.conf.Configuration;
 
 public class MapFileOutputFormat implements OutputFormat {
 
@@ -53,7 +53,7 @@
   }
 
   /** Open the output generated by this format. */
-  public static MapFile.Reader[] getReaders(NutchFileSystem fs, File dir, NutchConf nutchConf)
+  public static MapFile.Reader[] getReaders(NutchFileSystem fs, File dir, Configuration conf)
     throws IOException {
     File[] names = fs.listFiles(dir);
     
@@ -62,7 +62,7 @@
     
     MapFile.Reader[] parts = new MapFile.Reader[names.length];
     for (int i = 0; i < names.length; i++) {
-      parts[i] = new MapFile.Reader(fs, names[i].toString(), nutchConf);
+      parts[i] = new MapFile.Reader(fs, names[i].toString(), conf);
     }
     return parts;
   }

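A usage sketch for the getReaders() helper shown above, which opens one MapFile.Reader per part file under a job's output directory. The output path is an example, not taken from the diff, and the close() call on the reader is an assumption about its API.

    import java.io.File;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.NutchFileSystem;
    import org.apache.hadoop.io.MapFile;
    import org.apache.hadoop.mapred.MapFileOutputFormat;

    public class ReadOutputSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        NutchFileSystem fs = NutchFileSystem.get(conf);

        // Open one reader per part file under a job's output directory.
        File outputDir = new File("/tmp/job-output");   // example path, not from the diff
        MapFile.Reader[] readers = MapFileOutputFormat.getReaders(fs, outputDir, conf);
        for (int i = 0; i < readers.length; i++) {
          // ... look up or scan keys with readers[i] here ...
          readers[i].close();                           // assumption: Reader exposes close()
        }
      }
    }
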
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MapOutputFile.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MapOutputFile.java?rev=374733&r1=374710&r2=374733&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MapOutputFile.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MapOutputFile.java Fri Feb  3 11:45:32 2006
@@ -14,17 +14,17 @@
  * limitations under the License.
  */
 
-package org.apache.nutch.mapred;
+package org.apache.hadoop.mapred;
 
 import java.io.IOException;
 
 import java.io.*;
-import org.apache.nutch.io.*;
-import org.apache.nutch.fs.*;
-import org.apache.nutch.util.*;
+import org.apache.hadoop.io.*;
+import org.apache.hadoop.fs.*;
+import org.apache.hadoop.conf.*;
 
 /** A local file to be transferred via the {@link MapOutputProtocol}. */ 
-public class MapOutputFile implements Writable, NutchConfigurable {
+public class MapOutputFile implements Writable, Configurable {
   private String mapTaskId;
   private String reduceTaskId;
   private int partition;
@@ -135,11 +135,11 @@
     }
   }
 
-  public void setConf(NutchConf conf) {
+  public void setConf(Configuration conf) {
     this.jobConf = new JobConf(conf);
   }
 
-  public NutchConf getConf() {
+  public Configuration getConf() {
     return this.jobConf;
   }
 

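MapOutputFile illustrates the recurring Writable-plus-Configurable pattern of this commit: the IPC reading loop in Client.java injects the Configuration via setConf() before calling readFields(). A made-up value type following the same pattern (ConfiguredRecord is invented for this sketch):

    import java.io.DataInput;
    import java.io.DataOutput;
    import java.io.IOException;

    import org.apache.hadoop.conf.Configurable;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.io.UTF8;
    import org.apache.hadoop.io.Writable;

    /** A made-up value type following the Writable-plus-Configurable pattern above. */
    public class ConfiguredRecord implements Writable, Configurable {
      private Configuration conf;
      private UTF8 name = new UTF8();

      public void write(DataOutput out) throws IOException {
        name.write(out);                      // delegate to the contained Writable
      }

      public void readFields(DataInput in) throws IOException {
        name.readFields(in);
      }

      // Because this class is Configurable, the IPC reading loop in Client.java
      // above injects the Configuration before calling readFields().
      public void setConf(Configuration conf) {
        this.conf = conf;
      }

      public Configuration getConf() {
        return this.conf;
      }
    }
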
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MapOutputLocation.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MapOutputLocation.java?rev=374733&r1=374710&r2=374733&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MapOutputLocation.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MapOutputLocation.java Fri Feb  3 11:45:32 2006
@@ -14,13 +14,13 @@
  * limitations under the License.
  */
 
-package org.apache.nutch.mapred;
+package org.apache.hadoop.mapred;
 
 import java.io.IOException;
 
 import java.io.*;
-import org.apache.nutch.io.*;
-import org.apache.nutch.util.*;
+import org.apache.hadoop.io.*;
+import org.apache.hadoop.conf.*;
 
 /** The location of a map output file, as passed to a reduce task via the
  * {@link InterTrackerProtocol}. */ 

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MapOutputProtocol.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MapOutputProtocol.java?rev=374733&r1=374710&r2=374733&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MapOutputProtocol.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MapOutputProtocol.java Fri Feb  3 11:45:32 2006
@@ -14,11 +14,11 @@
  * limitations under the License.
  */
 
-package org.apache.nutch.mapred;
+package org.apache.hadoop.mapred;
 
 import java.io.IOException;
 
-import org.apache.nutch.io.*;
+import org.apache.hadoop.io.*;
 
 /** Protocol that a reduce task uses to retrieve output data from a map task's
  * tracker. */ 

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MapRunnable.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MapRunnable.java?rev=374733&r1=374710&r2=374733&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MapRunnable.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MapRunnable.java Fri Feb  3 11:45:32 2006
@@ -14,12 +14,12 @@
  * limitations under the License.
  */
 
-package org.apache.nutch.mapred;
+package org.apache.hadoop.mapred;
 
 import java.io.IOException;
 
-import org.apache.nutch.io.Writable;
-import org.apache.nutch.io.WritableComparable;
+import org.apache.hadoop.io.Writable;
+import org.apache.hadoop.io.WritableComparable;
 
 /** Expert: Permits greater control of map processing. For example,
  * implementations might perform multi-threaded, asynchronous mappings. */

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MapRunner.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MapRunner.java?rev=374733&r1=374710&r2=374733&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MapRunner.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MapRunner.java Fri Feb  3 11:45:32 2006
@@ -14,12 +14,12 @@
  * limitations under the License.
  */
 
-package org.apache.nutch.mapred;
+package org.apache.hadoop.mapred;
 
 import java.io.IOException;
 
-import org.apache.nutch.io.Writable;
-import org.apache.nutch.io.WritableComparable;
+import org.apache.hadoop.io.Writable;
+import org.apache.hadoop.io.WritableComparable;
 
 /** Default {@link MapRunnable} implementation.*/
 public class MapRunner implements MapRunnable {

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MapTask.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MapTask.java?rev=374733&r1=374710&r2=374733&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MapTask.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MapTask.java Fri Feb  3 11:45:32 2006
@@ -14,21 +14,21 @@
  * limitations under the License.
  */
 
-package org.apache.nutch.mapred;
+package org.apache.hadoop.mapred;
 
 import java.io.*;
 import java.net.*;
 import java.util.*;
 
-import org.apache.nutch.io.*;
-import org.apache.nutch.util.NutchConf;
-import org.apache.nutch.fs.NutchFileSystem;
+import org.apache.hadoop.io.*;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.NutchFileSystem;
 
 /** A Map task. */
 public class MapTask extends Task {
   private FileSplit split;
   private MapOutputFile mapOutputFile;
-  private NutchConf nutchConf;
+  private Configuration conf;
 
   public MapTask() {}
 
@@ -42,7 +42,7 @@
   }
 
   public TaskRunner createRunner(TaskTracker tracker) {
-    return new MapTaskRunner(this, tracker, this.nutchConf);
+    return new MapTaskRunner(this, tracker, this.conf);
   }
 
   public FileSplit getSplit() { return split; }
@@ -138,14 +138,14 @@
     done(umbilical);
   }
 
-  public void setConf(NutchConf conf) {
-    this.nutchConf = conf;
+  public void setConf(Configuration conf) {
+    this.conf = conf;
     this.mapOutputFile = new MapOutputFile();
     this.mapOutputFile.setConf(conf);
   }
 
-  public NutchConf getConf() {
-    return this.nutchConf;
+  public Configuration getConf() {
+    return this.conf;
   }
   
 }

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MapTaskRunner.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MapTaskRunner.java?rev=374733&r1=374710&r2=374733&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MapTaskRunner.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MapTaskRunner.java Fri Feb  3 11:45:32 2006
@@ -13,11 +13,11 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.nutch.mapred;
+package org.apache.hadoop.mapred;
 
-import org.apache.nutch.io.*;
-import org.apache.nutch.ipc.*;
-import org.apache.nutch.util.*;
+import org.apache.hadoop.io.*;
+import org.apache.hadoop.ipc.*;
+import org.apache.hadoop.conf.*;
 
 import java.io.*;
 import java.net.*;
@@ -28,10 +28,10 @@
 class MapTaskRunner extends TaskRunner {
   private MapOutputFile mapOutputFile;
 
-  public MapTaskRunner(Task task, TaskTracker tracker, NutchConf nutchConf) {
-    super(task, tracker, nutchConf);
+  public MapTaskRunner(Task task, TaskTracker tracker, Configuration conf) {
+    super(task, tracker, conf);
     this.mapOutputFile = new MapOutputFile();
-    this.mapOutputFile.setConf(nutchConf);
+    this.mapOutputFile.setConf(conf);
   }
   
   /** Delete any temporary files from previous failed attempts. */

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/Mapper.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/Mapper.java?rev=374733&r1=374710&r2=374733&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/Mapper.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/Mapper.java Fri Feb  3 11:45:32 2006
@@ -14,12 +14,12 @@
  * limitations under the License.
  */
 
-package org.apache.nutch.mapred;
+package org.apache.hadoop.mapred;
 
 import java.io.IOException;
 
-import org.apache.nutch.io.Writable;
-import org.apache.nutch.io.WritableComparable;
+import org.apache.hadoop.io.Writable;
+import org.apache.hadoop.io.WritableComparable;
 
 /** Maps input key/value pairs to a set of intermediate key/value pairs.  All
  * intermediate values associated with a given output key are subsequently

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/OutputCollector.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/OutputCollector.java?rev=374733&r1=374710&r2=374733&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/OutputCollector.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/OutputCollector.java Fri Feb  3 11:45:32 2006
@@ -14,13 +14,13 @@
  * limitations under the License.
  */
 
-package org.apache.nutch.mapred;
+package org.apache.hadoop.mapred;
 
 import java.io.IOException;
 import java.io.DataInput;
 
-import org.apache.nutch.io.Writable;
-import org.apache.nutch.io.WritableComparable;
+import org.apache.hadoop.io.Writable;
+import org.apache.hadoop.io.WritableComparable;
 
 
 /** Passed to {@link Mapper} and {@link Reducer} implementations to collect

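The Mapper and OutputCollector descriptions above carry the core contract: one map call may emit any number of intermediate pairs, all funneled through the collector. The exact Mapper.map(...) signature is not shown in this part of the diff, so the sketch below only exercises the collector side; the word-count style split is invented for illustration, and collect(WritableComparable, Writable) is assumed to be the collector's method.

    import java.io.IOException;
    import java.util.StringTokenizer;

    import org.apache.hadoop.io.LongWritable;
    import org.apache.hadoop.io.UTF8;
    import org.apache.hadoop.mapred.OutputCollector;

    public class EmitWordsSketch {

      /** Split a line into words and emit one (word, 1) pair per word. */
      public static void emitWords(UTF8 line, OutputCollector output) throws IOException {
        StringTokenizer words = new StringTokenizer(line.toString());
        while (words.hasMoreTokens()) {
          // collect(WritableComparable, Writable) is assumed to be the collector's
          // single method; each call hands one intermediate pair to the framework.
          output.collect(new UTF8(words.nextToken()), new LongWritable(1));
        }
      }
    }
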
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/OutputFormat.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/OutputFormat.java?rev=374733&r1=374710&r2=374733&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/OutputFormat.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/OutputFormat.java Fri Feb  3 11:45:32 2006
@@ -14,12 +14,12 @@
  * limitations under the License.
  */
 
-package org.apache.nutch.mapred;
+package org.apache.hadoop.mapred;
 
 import java.io.IOException;
 import java.io.File;
 
-import org.apache.nutch.fs.NutchFileSystem;
+import org.apache.hadoop.fs.NutchFileSystem;
 
 /** An output data format.  Output files are stored in a {@link
  * NutchFileSystem}. */

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/Partitioner.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/Partitioner.java?rev=374733&r1=374710&r2=374733&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/Partitioner.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/Partitioner.java Fri Feb  3 11:45:32 2006
@@ -14,10 +14,10 @@
  * limitations under the License.
  */
 
-package org.apache.nutch.mapred;
+package org.apache.hadoop.mapred;
 
-import org.apache.nutch.io.Writable;
-import org.apache.nutch.io.WritableComparable;
+import org.apache.hadoop.io.Writable;
+import org.apache.hadoop.io.WritableComparable;
 
 /** Partitions the key space.  A partition is created for each reduce task. */
 public interface Partitioner extends JobConfigurable {

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/RecordReader.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/RecordReader.java?rev=374733&r1=374710&r2=374733&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/RecordReader.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/RecordReader.java Fri Feb  3 11:45:32 2006
@@ -14,13 +14,13 @@
  * limitations under the License.
  */
 
-package org.apache.nutch.mapred;
+package org.apache.hadoop.mapred;
 
 import java.io.IOException;
 import java.io.DataInput;
 
-import org.apache.nutch.io.WritableComparable;
-import org.apache.nutch.io.Writable;
+import org.apache.hadoop.io.WritableComparable;
+import org.apache.hadoop.io.Writable;
 
 /** Reads key/value pairs from an input file {@link FileSplit}.
  * Implemented by {@link InputFormat} implementations. */

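RecordReader's methods are also outside the hunk; a typical consumer simply drains it with a next(key, value) loop. The loop below is a sketch under that assumption (the next() and close() signatures are not shown in this diff):

    import java.io.IOException;

    import org.apache.hadoop.io.LongWritable;
    import org.apache.hadoop.io.UTF8;
    import org.apache.hadoop.mapred.RecordReader;

    // Illustrative driver loop: drains a RecordReader handed out by some InputFormat.
    public class RecordDrain {
      public static long countRecords(RecordReader reader) throws IOException {
        LongWritable key = new LongWritable();     // reusable key/value instances;
        UTF8 value = new UTF8();                   // the concrete types are format-specific
        long n = 0;
        while (reader.next(key, value)) {          // next(key, value) signature assumed
          n++;
        }
        reader.close();                            // close() assumed as well
        return n;
      }
    }
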
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/RecordWriter.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/RecordWriter.java?rev=374733&r1=374710&r2=374733&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/RecordWriter.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/RecordWriter.java Fri Feb  3 11:45:32 2006
@@ -14,13 +14,13 @@
  * limitations under the License.
  */
 
-package org.apache.nutch.mapred;
+package org.apache.hadoop.mapred;
 
 import java.io.IOException;
 import java.io.DataOutput;
 
-import org.apache.nutch.io.WritableComparable;
-import org.apache.nutch.io.Writable;
+import org.apache.hadoop.io.WritableComparable;
+import org.apache.hadoop.io.Writable;
 
 /** Writes key/value pairs to an output file.  Implemented by {@link
  * OutputFormat} implementations. */

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/ReduceTask.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/ReduceTask.java?rev=374733&r1=374710&r2=374733&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/ReduceTask.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/ReduceTask.java Fri Feb  3 11:45:32 2006
@@ -14,11 +14,11 @@
  * limitations under the License.
  */
 
-package org.apache.nutch.mapred;
+package org.apache.hadoop.mapred;
 
-import org.apache.nutch.io.*;
-import org.apache.nutch.util.*;
-import org.apache.nutch.fs.*;
+import org.apache.hadoop.io.*;
+import org.apache.hadoop.conf.*;
+import org.apache.hadoop.fs.*;
 
 import java.io.*;
 import java.net.*;
@@ -37,7 +37,7 @@
   private Progress appendPhase = getProgress().addPhase("append");
   private Progress sortPhase  = getProgress().addPhase("sort");
   private Progress reducePhase = getProgress().addPhase("reduce");
-  private NutchConf nutchConf;
+  private Configuration conf;
   private MapOutputFile mapOutputFile;
 
   public ReduceTask() {}
@@ -50,7 +50,7 @@
   }
 
   public TaskRunner createRunner(TaskTracker tracker) {
-    return new ReduceTaskRunner(this, tracker, this.nutchConf);
+    return new ReduceTaskRunner(this, tracker, this.conf);
   }
 
   public boolean isMapTask() {
@@ -297,14 +297,14 @@
     return "part-" + NUMBER_FORMAT.format(partition);
   }
 
-  public void setConf(NutchConf conf) {
-    this.nutchConf = conf;
+  public void setConf(Configuration conf) {
+    this.conf = conf;
     this.mapOutputFile = new MapOutputFile();
     this.mapOutputFile.setConf(conf);
   }
 
-  public NutchConf getConf() {
-    return this.nutchConf;
+  public Configuration getConf() {
+    return this.conf;
   }
 
 }

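The setConf/getConf pair above is the whole of the Configurable contract that replaces NutchConfigurable here; any helper that needs a Configuration injected can follow the same pattern, for example:

    import org.apache.hadoop.conf.Configurable;
    import org.apache.hadoop.conf.Configuration;

    // Minimal Configurable sketch mirroring the ReduceTask change above:
    // the framework injects a Configuration via setConf and reads it back via getConf.
    public class ConfiguredThing implements Configurable {
      private Configuration conf;

      public void setConf(Configuration conf) {
        this.conf = conf;
        // e.g. pull a tunable the same way TaskTracker does further down:
        // int max = conf.getInt("mapred.tasktracker.tasks.maximum", 2);
      }

      public Configuration getConf() {
        return this.conf;
      }
    }
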
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/ReduceTaskRunner.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/ReduceTaskRunner.java?rev=374733&r1=374710&r2=374733&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/ReduceTaskRunner.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/ReduceTaskRunner.java Fri Feb  3 11:45:32 2006
@@ -13,11 +13,12 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.nutch.mapred;
+package org.apache.hadoop.mapred;
 
-import org.apache.nutch.io.*;
-import org.apache.nutch.ipc.*;
-import org.apache.nutch.util.*;
+import org.apache.hadoop.io.*;
+import org.apache.hadoop.ipc.*;
+import org.apache.hadoop.conf.*;
+import org.apache.hadoop.util.LogFormatter;
 
 import java.io.*;
 import java.net.*;
@@ -27,13 +28,13 @@
 /** Runs a reduce task. */
 class ReduceTaskRunner extends TaskRunner {
   private static final Logger LOG =
-    LogFormatter.getLogger("org.apache.nutch.mapred.ReduceTaskRunner");
+    LogFormatter.getLogger("org.apache.hadoop.mapred.ReduceTaskRunner");
   private MapOutputFile mapOutputFile;
 
-  public ReduceTaskRunner(Task task, TaskTracker tracker, NutchConf nutchConf) {
-    super(task, tracker, nutchConf);
+  public ReduceTaskRunner(Task task, TaskTracker tracker, Configuration conf) {
+    super(task, tracker, conf);
     this.mapOutputFile = new MapOutputFile();
-    this.mapOutputFile.setConf(nutchConf);
+    this.mapOutputFile.setConf(conf);
   }
 
   /** Assemble all of the map output files. */
@@ -81,7 +82,7 @@
         InetSocketAddress addr =
           new InetSocketAddress(loc.getHost(), loc.getPort());
         MapOutputProtocol client =
-          (MapOutputProtocol)RPC.getProxy(MapOutputProtocol.class, addr, this.nutchConf);
+          (MapOutputProtocol)RPC.getProxy(MapOutputProtocol.class, addr, this.conf);
 
         this.mapOutputFile.setProgressReporter(new MapOutputFile.ProgressReporter() {
             public void progress(float progress) {

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/Reducer.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/Reducer.java?rev=374733&r1=374710&r2=374733&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/Reducer.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/Reducer.java Fri Feb  3 11:45:32 2006
@@ -14,14 +14,14 @@
  * limitations under the License.
  */
 
-package org.apache.nutch.mapred;
+package org.apache.hadoop.mapred;
 
 import java.io.IOException;
 
 import java.util.Iterator;
 
-import org.apache.nutch.io.Writable;
-import org.apache.nutch.io.WritableComparable;
+import org.apache.hadoop.io.Writable;
+import org.apache.hadoop.io.WritableComparable;
 
 /** Reduces a set of intermediate values which share a key to a smaller set of
  * values.  Input values are the grouped output of a {@link Mapper}. */

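Reducer's reduce() method sits outside the hunk. Assuming the pre-generics shape (a WritableComparable key, an Iterator of grouped values, an OutputCollector and a Reporter) and omitting any other lifecycle methods the interface may declare, a value-summing reducer might look like:

    import java.io.IOException;
    import java.util.Iterator;

    import org.apache.hadoop.io.LongWritable;
    import org.apache.hadoop.io.WritableComparable;
    import org.apache.hadoop.mapred.JobConf;
    import org.apache.hadoop.mapred.OutputCollector;
    import org.apache.hadoop.mapred.Reducer;
    import org.apache.hadoop.mapred.Reporter;

    // Illustrative sketch only: adds up the LongWritable values that share a key,
    // in the spirit of the LongSumReducer referenced by the Grep demo below.
    public class SumReducer implements Reducer {
      public void configure(JobConf job) {}          // JobConfigurable hook (name assumed)

      public void reduce(WritableComparable key, Iterator values,
                         OutputCollector output, Reporter reporter)
          throws IOException {                       // signature assumed, not shown above
        long sum = 0;
        while (values.hasNext()) {
          sum += ((LongWritable)values.next()).get();
        }
        output.collect(key, new LongWritable(sum));
      }
    }
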
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/Reporter.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/Reporter.java?rev=374733&r1=374710&r2=374733&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/Reporter.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/Reporter.java Fri Feb  3 11:45:32 2006
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-package org.apache.nutch.mapred;
+package org.apache.hadoop.mapred;
 
 import java.io.IOException;
 

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/RunningJob.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/RunningJob.java?rev=374733&r1=374710&r2=374733&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/RunningJob.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/RunningJob.java Fri Feb  3 11:45:32 2006
@@ -14,9 +14,9 @@
  * limitations under the License.
  */
 
-package org.apache.nutch.mapred;
+package org.apache.hadoop.mapred;
 
-import org.apache.nutch.io.*;
+import org.apache.hadoop.io.*;
 
 import java.io.*;
 import java.net.*;

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/SequenceFileInputFormat.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/SequenceFileInputFormat.java?rev=374733&r1=374710&r2=374733&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/SequenceFileInputFormat.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/SequenceFileInputFormat.java Fri Feb  3 11:45:32 2006
@@ -14,19 +14,19 @@
  * limitations under the License.
  */
 
-package org.apache.nutch.mapred;
+package org.apache.hadoop.mapred;
 
 import java.io.IOException;
 import java.io.File;
 
-import org.apache.nutch.fs.NutchFileSystem;
+import org.apache.hadoop.fs.NutchFileSystem;
 
-import org.apache.nutch.io.SequenceFile;
-import org.apache.nutch.io.MapFile;
-import org.apache.nutch.io.Writable;
-import org.apache.nutch.io.WritableComparable;
-import org.apache.nutch.io.LongWritable;
-import org.apache.nutch.io.UTF8;
+import org.apache.hadoop.io.SequenceFile;
+import org.apache.hadoop.io.MapFile;
+import org.apache.hadoop.io.Writable;
+import org.apache.hadoop.io.WritableComparable;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.UTF8;
 
 /** An {@link InputFormat} for {@link SequenceFile}s. */
 public class SequenceFileInputFormat extends InputFormatBase {

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/SequenceFileOutputFormat.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/SequenceFileOutputFormat.java?rev=374733&r1=374710&r2=374733&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/SequenceFileOutputFormat.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/SequenceFileOutputFormat.java Fri Feb  3 11:45:32 2006
@@ -14,19 +14,19 @@
  * limitations under the License.
  */
 
-package org.apache.nutch.mapred;
+package org.apache.hadoop.mapred;
 
 import java.io.IOException;
 import java.io.File;
 import java.util.Arrays;
 
-import org.apache.nutch.fs.NutchFileSystem;
+import org.apache.hadoop.fs.NutchFileSystem;
 
-import org.apache.nutch.io.MapFile;
-import org.apache.nutch.io.SequenceFile;
-import org.apache.nutch.io.WritableComparable;
-import org.apache.nutch.io.Writable;
-import org.apache.nutch.util.NutchConf;
+import org.apache.hadoop.io.MapFile;
+import org.apache.hadoop.io.SequenceFile;
+import org.apache.hadoop.io.WritableComparable;
+import org.apache.hadoop.io.Writable;
+import org.apache.hadoop.conf.Configuration;
 
 public class SequenceFileOutputFormat implements OutputFormat {
 
@@ -54,9 +54,9 @@
   }
 
   /** Open the output generated by this format. */
-  public static SequenceFile.Reader[] getReaders(NutchConf nutchConf, File dir)
+  public static SequenceFile.Reader[] getReaders(Configuration conf, File dir)
     throws IOException {
-    NutchFileSystem fs = NutchFileSystem.get(nutchConf);
+    NutchFileSystem fs = NutchFileSystem.get(conf);
     File[] names = fs.listFiles(dir);
     
     // sort names, so that hash partitioning works
@@ -64,7 +64,7 @@
     
     SequenceFile.Reader[] parts = new SequenceFile.Reader[names.length];
     for (int i = 0; i < names.length; i++) {
-      parts[i] = new SequenceFile.Reader(fs, names[i].toString(), nutchConf);
+      parts[i] = new SequenceFile.Reader(fs, names[i].toString(), conf);
     }
     return parts;
   }

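The new static getReaders(Configuration, File) shown above hands back one SequenceFile.Reader per part file, pre-sorted so hash partitioning lines up. Reading a job's output back could then look like the sketch below; Reader.next(key, value) and close() are assumed from the io package rather than shown in this hunk:

    import java.io.File;
    import java.io.IOException;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.io.LongWritable;
    import org.apache.hadoop.io.SequenceFile;
    import org.apache.hadoop.io.UTF8;
    import org.apache.hadoop.mapred.SequenceFileOutputFormat;

    // Illustrative: walk every part file a job wrote and print its pairs.
    public class DumpOutput {
      public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        SequenceFile.Reader[] parts =
          SequenceFileOutputFormat.getReaders(conf, new File(args[0]));
        UTF8 key = new UTF8();                       // key/value types are job-specific;
        LongWritable value = new LongWritable();     // UTF8/LongWritable are just examples
        for (int i = 0; i < parts.length; i++) {
          while (parts[i].next(key, value)) {        // next(key, value) assumed
            System.out.println(key + "\t" + value);
          }
          parts[i].close();
        }
      }
    }
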
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/SequenceFileRecordReader.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/SequenceFileRecordReader.java?rev=374733&r1=374710&r2=374733&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/SequenceFileRecordReader.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/SequenceFileRecordReader.java Fri Feb  3 11:45:32 2006
@@ -14,19 +14,19 @@
  * limitations under the License.
  */
 
-package org.apache.nutch.mapred;
+package org.apache.hadoop.mapred;
 
 import java.io.IOException;
 import java.io.File;
 
-import org.apache.nutch.fs.NutchFileSystem;
+import org.apache.hadoop.fs.NutchFileSystem;
 
-import org.apache.nutch.io.SequenceFile;
-import org.apache.nutch.io.Writable;
-import org.apache.nutch.io.WritableComparable;
-import org.apache.nutch.io.LongWritable;
-import org.apache.nutch.io.UTF8;
-import org.apache.nutch.util.NutchConf;
+import org.apache.hadoop.io.SequenceFile;
+import org.apache.hadoop.io.Writable;
+import org.apache.hadoop.io.WritableComparable;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.UTF8;
+import org.apache.hadoop.conf.Configuration;
 
 /** A {@link RecordReader} for {@link SequenceFile}s. */
 public class SequenceFileRecordReader implements RecordReader {
@@ -34,10 +34,10 @@
   private long end;
   private boolean more = true;
 
-  public SequenceFileRecordReader(NutchConf nutchConf, FileSplit split)
+  public SequenceFileRecordReader(Configuration conf, FileSplit split)
     throws IOException {
-    NutchFileSystem fs = NutchFileSystem.get(nutchConf);
-    this.in = new SequenceFile.Reader(fs, split.getFile().toString(), nutchConf);
+    NutchFileSystem fs = NutchFileSystem.get(conf);
+    this.in = new SequenceFile.Reader(fs, split.getFile().toString(), conf);
     this.end = split.getStart() + split.getLength();
 
     if (split.getStart() > in.getPosition())

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/Task.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/Task.java?rev=374733&r1=374710&r2=374733&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/Task.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/Task.java Fri Feb  3 11:45:32 2006
@@ -14,17 +14,17 @@
  * limitations under the License.
  */
 
-package org.apache.nutch.mapred;
+package org.apache.hadoop.mapred;
 
-import org.apache.nutch.io.*;
-import org.apache.nutch.util.*;
+import org.apache.hadoop.io.*;
+import org.apache.hadoop.conf.*;
 
 import java.io.*;
 import java.net.*;
 import java.util.*;
 
 /** Base class for tasks. */
-public abstract class Task implements Writable, NutchConfigurable {
+public abstract class Task implements Writable, Configurable {
   ////////////////////////////////////////////
   // Fields
   ////////////////////////////////////////////

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskInProgress.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskInProgress.java?rev=374733&r1=374710&r2=374733&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskInProgress.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskInProgress.java Fri Feb  3 11:45:32 2006
@@ -13,12 +13,13 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.nutch.mapred;
+package org.apache.hadoop.mapred;
 
-import org.apache.nutch.io.*;
-import org.apache.nutch.fs.*;
-import org.apache.nutch.ipc.*;
-import org.apache.nutch.util.*;
+import org.apache.hadoop.io.*;
+import org.apache.hadoop.fs.*;
+import org.apache.hadoop.ipc.*;
+import org.apache.hadoop.conf.*;
+import org.apache.hadoop.util.LogFormatter;
 
 import java.io.*;
 import java.net.*;
@@ -46,7 +47,7 @@
     static final double SPECULATIVE_GAP = 0.2;
     static final long SPECULATIVE_LAG = 60 * 1000;
 
-    public static final Logger LOG = LogFormatter.getLogger("org.apache.nutch.mapred.TaskInProgress");
+    public static final Logger LOG = LogFormatter.getLogger("org.apache.hadoop.mapred.TaskInProgress");
 
     // Defines the TIP
     String jobFile = null;
@@ -66,7 +67,7 @@
     boolean failed = false;
     TreeSet usableTaskIds = new TreeSet();
     TreeSet recentTasks = new TreeSet();
-    NutchConf nutchConf;
+    Configuration conf;
     
     TreeMap taskDiagnosticData = new TreeMap();
     TreeMap taskStatuses = new TreeMap();
@@ -77,25 +78,25 @@
     /**
      * Constructor for MapTask
      */
-    public TaskInProgress(String jobFile, FileSplit split, JobTracker jobtracker, NutchConf nutchConf, JobInProgress job) {
+    public TaskInProgress(String jobFile, FileSplit split, JobTracker jobtracker, Configuration conf, JobInProgress job) {
         this.jobFile = jobFile;
         this.split = split;
         this.jobtracker = jobtracker;
         this.job = job;
-        this.nutchConf = nutchConf;
+        this.conf = conf;
         init();
     }
         
     /**
      * Constructor for ReduceTask
      */
-    public TaskInProgress(String jobFile, TaskInProgress predecessors[], int partition, JobTracker jobtracker, NutchConf nutchConf, JobInProgress job) {
+    public TaskInProgress(String jobFile, TaskInProgress predecessors[], int partition, JobTracker jobtracker, Configuration conf, JobInProgress job) {
         this.jobFile = jobFile;
         this.predecessors = predecessors;
         this.partition = partition;
         this.jobtracker = jobtracker;
         this.job = job;
-        this.nutchConf = nutchConf;
+        this.conf = conf;
         init();
     }
 
@@ -353,7 +354,7 @@
     // to do something.
     /////////////////////////////////////////////////
     /**
-     * Return whether this TIP has an NDFS cache-driven task 
+     * Return whether this TIP has a DFS cache-driven task 
      * to run at the given taskTracker.
      */
     boolean hasTaskWithCacheHit(String taskTracker, TaskTrackerStatus tts) {
@@ -362,7 +363,7 @@
         } else {
             try {
                 if (isMapTask()) {
-                    NutchFileSystem fs = NutchFileSystem.get(nutchConf);
+                    NutchFileSystem fs = NutchFileSystem.get(conf);
                     String hints[][] = fs.getFileCacheHints(split.getFile(), split.getStart(), split.getLength());
                     for (int i = 0; i < hints.length; i++) {
                         for (int j = 0; j < hints[i].length; j++) {
@@ -433,7 +434,7 @@
                 }
                 t = new ReduceTask(jobFile, taskid, mapIdPredecessors, partition);
             }
-            t.setConf(nutchConf);
+            t.setConf(conf);
 
             recentTasks.add(taskid);
 

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskRunner.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskRunner.java?rev=374733&r1=374710&r2=374733&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskRunner.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskRunner.java Fri Feb  3 11:45:32 2006
@@ -13,12 +13,13 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.nutch.mapred;
+package org.apache.hadoop.mapred;
 
-import org.apache.nutch.io.*;
-import org.apache.nutch.ipc.*;
-import org.apache.nutch.util.*;
-import org.apache.nutch.fs.*;
+import org.apache.hadoop.io.*;
+import org.apache.hadoop.ipc.*;
+import org.apache.hadoop.conf.*;
+import org.apache.hadoop.util.LogFormatter;
+import org.apache.hadoop.fs.*;
 
 import java.io.*;
 import java.net.*;
@@ -31,19 +32,19 @@
  */
 abstract class TaskRunner extends Thread {
   public static final Logger LOG =
-    LogFormatter.getLogger("org.apache.nutch.mapred.TaskRunner");
+    LogFormatter.getLogger("org.apache.hadoop.mapred.TaskRunner");
 
   boolean killed = false;
   private Process process;
   private Task t;
   private TaskTracker tracker;
 
-  protected NutchConf nutchConf;
+  protected Configuration conf;
 
-  public TaskRunner(Task t, TaskTracker tracker, NutchConf nutchConf) {
+  public TaskRunner(Task t, TaskTracker tracker, Configuration conf) {
     this.t = t;
     this.tracker = tracker;
-    this.nutchConf = nutchConf;
+    this.conf = conf;
   }
 
   public Task getTask() { return t; }

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskStatus.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskStatus.java?rev=374733&r1=374710&r2=374733&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskStatus.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskStatus.java Fri Feb  3 11:45:32 2006
@@ -13,9 +13,9 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.nutch.mapred;
+package org.apache.hadoop.mapred;
 
-import org.apache.nutch.io.*;
+import org.apache.hadoop.io.*;
 
 import java.io.*;
 import java.net.*;

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskTracker.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskTracker.java?rev=374733&r1=374710&r2=374733&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskTracker.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskTracker.java Fri Feb  3 11:45:32 2006
@@ -13,12 +13,13 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
- package org.apache.nutch.mapred;
+ package org.apache.hadoop.mapred;
 
-import org.apache.nutch.fs.*;
-import org.apache.nutch.io.*;
-import org.apache.nutch.ipc.*;
-import org.apache.nutch.util.*;
+import org.apache.hadoop.fs.*;
+import org.apache.hadoop.io.*;
+import org.apache.hadoop.ipc.*;
+import org.apache.hadoop.conf.*;
+import org.apache.hadoop.util.LogFormatter;
 
 import java.io.*;
 import java.net.*;
@@ -39,7 +40,7 @@
     static final int STALE_STATE = 1;
 
     public static final Logger LOG =
-    LogFormatter.getLogger("org.apache.nutch.mapred.TaskTracker");
+    LogFormatter.getLogger("org.apache.hadoop.mapred.TaskTracker");
 
     private boolean running = true;
 
@@ -62,7 +63,7 @@
     NutchFileSystem fs = null;
     static final String SUBDIR = "taskTracker";
 
-    private NutchConf fConf;
+    private Configuration fConf;
     private MapOutputFile mapOutputFile;
 
     private int maxCurrentTasks;
@@ -70,14 +71,14 @@
     /**
      * Start with the local machine name, and the default JobTracker
      */
-    public TaskTracker(NutchConf conf) throws IOException {
+    public TaskTracker(Configuration conf) throws IOException {
       this(JobTracker.getAddress(conf), conf);
     }
 
     /**
      * Start with the local machine name, and the addr of the target JobTracker
      */
-    public TaskTracker(InetSocketAddress jobTrackAddr, NutchConf conf) throws IOException {
+    public TaskTracker(InetSocketAddress jobTrackAddr, Configuration conf) throws IOException {
         maxCurrentTasks = conf.getInt("mapred.tasktracker.tasks.maximum", 2);
 
         this.fConf = conf;
@@ -352,10 +353,10 @@
 
         /**
          */
-        public TaskInProgress(Task task, NutchConf nutchConf) throws IOException {
+        public TaskInProgress(Task task, Configuration conf) throws IOException {
             this.task = task;
             this.lastProgressReport = System.currentTimeMillis();
-            this.jobConf = new JobConf(nutchConf);
+            this.jobConf = new JobConf(conf);
             this.jobConf.deleteLocalFiles(SUBDIR + File.separator + task.getTaskId());
             localizeTask(task);
         }
@@ -618,17 +619,17 @@
           LogFormatter.showTime(false);
           LOG.info("Child starting");
 
-          NutchConf nutchConf = new NutchConf();
+          Configuration conf = new Configuration();
           int port = Integer.parseInt(args[0]);
           String taskid = args[1];
           TaskUmbilicalProtocol umbilical =
             (TaskUmbilicalProtocol)RPC.getProxy(TaskUmbilicalProtocol.class,
-                                                new InetSocketAddress(port), nutchConf);
+                                                new InetSocketAddress(port), conf);
             
           Task task = umbilical.getTask(taskid);
           JobConf job = new JobConf(task.getJobFile());
 
-          nutchConf.addConfResource(new File(task.getJobFile()));
+          conf.addConfResource(new File(task.getJobFile()));
 
           startPinging(umbilical, taskid);        // start pinging parent
 
@@ -679,7 +680,7 @@
             System.exit(-1);
         }
 
-        TaskTracker tt = new TaskTracker(new NutchConf());
+        TaskTracker tt = new TaskTracker(new Configuration());
         tt.run();
     }
 }

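The child-startup changes above double as a small tour of the renamed conf API: build a Configuration, overlay a job file with addConfResource, and read typed values with getInt. The same sequence as a standalone sketch:

    import java.io.File;

    import org.apache.hadoop.conf.Configuration;

    // Illustrative only: mirrors the Configuration calls visible in the
    // TaskTracker hunks above.
    public class ConfDemo {
      public static void main(String[] args) {
        Configuration conf = new Configuration();

        // Overlay an extra resource, as the child process does with its job file.
        if (args.length > 0) {
          conf.addConfResource(new File(args[0]));
        }

        // Typed lookup with a default, as in the maxCurrentTasks initialisation.
        int maxTasks = conf.getInt("mapred.tasktracker.tasks.maximum", 2);
        System.out.println("mapred.tasktracker.tasks.maximum = " + maxTasks);
      }
    }
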
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskTrackerStatus.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskTrackerStatus.java?rev=374733&r1=374710&r2=374733&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskTrackerStatus.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskTrackerStatus.java Fri Feb  3 11:45:32 2006
@@ -13,9 +13,9 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.nutch.mapred;
+package org.apache.hadoop.mapred;
 
-import org.apache.nutch.io.*;
+import org.apache.hadoop.io.*;
 
 import java.io.*;
 import java.net.*;

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskUmbilicalProtocol.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskUmbilicalProtocol.java?rev=374733&r1=374710&r2=374733&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskUmbilicalProtocol.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskUmbilicalProtocol.java Fri Feb  3 11:45:32 2006
@@ -14,11 +14,11 @@
  * limitations under the License.
  */
 
-package org.apache.nutch.mapred;
+package org.apache.hadoop.mapred;
 
 import java.io.IOException;
 
-import org.apache.nutch.io.*;
+import org.apache.hadoop.io.*;
 
 /** Protocol that task child process uses to contact its parent process.  The
  * parent is a daemon which polls the central master for a new map or

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TextInputFormat.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TextInputFormat.java?rev=374733&r1=374710&r2=374733&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TextInputFormat.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TextInputFormat.java Fri Feb  3 11:45:32 2006
@@ -14,18 +14,18 @@
  * limitations under the License.
  */
 
-package org.apache.nutch.mapred;
+package org.apache.hadoop.mapred;
 
 import java.io.IOException;
 import java.io.File;
 
-import org.apache.nutch.fs.NutchFileSystem;
-import org.apache.nutch.fs.NFSDataInputStream;
+import org.apache.hadoop.fs.NutchFileSystem;
+import org.apache.hadoop.fs.NFSDataInputStream;
 
-import org.apache.nutch.io.Writable;
-import org.apache.nutch.io.WritableComparable;
-import org.apache.nutch.io.LongWritable;
-import org.apache.nutch.io.UTF8;
+import org.apache.hadoop.io.Writable;
+import org.apache.hadoop.io.WritableComparable;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.UTF8;
 
 /** An {@link InputFormat} for plain text files.  Files are broken into lines.
  * Either linefeed or carriage-return is used to signal end of line.  Keys are

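The TextInputFormat javadoc is cut off mid-sentence by the hunk boundary, but the LongWritable and UTF8 imports suggest the usual pairing of file-position keys and line-text values. A mapper written under that assumption (the map() signature is assumed as well):

    import java.io.IOException;

    import org.apache.hadoop.io.LongWritable;
    import org.apache.hadoop.io.UTF8;
    import org.apache.hadoop.io.Writable;
    import org.apache.hadoop.io.WritableComparable;
    import org.apache.hadoop.mapred.JobConf;
    import org.apache.hadoop.mapred.Mapper;
    import org.apache.hadoop.mapred.OutputCollector;
    import org.apache.hadoop.mapred.Reporter;

    // Illustrative sketch: assumes TextInputFormat hands each map() call a
    // LongWritable position and a UTF8 line, as the imports above suggest.
    public class LineLengthMapper implements Mapper {
      public void configure(JobConf job) {}          // JobConfigurable hook (name assumed)

      public void map(WritableComparable key, Writable value,
                      OutputCollector output, Reporter reporter)
          throws IOException {                       // map() signature assumed
        UTF8 line = (UTF8)value;
        output.collect(new LongWritable(line.toString().length()), line);
      }
    }
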
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TextOutputFormat.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TextOutputFormat.java?rev=374733&r1=374710&r2=374733&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TextOutputFormat.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TextOutputFormat.java Fri Feb  3 11:45:32 2006
@@ -14,16 +14,16 @@
  * limitations under the License.
  */
 
-package org.apache.nutch.mapred;
+package org.apache.hadoop.mapred;
 
 import java.io.IOException;
 import java.io.File;
 
-import org.apache.nutch.fs.NutchFileSystem;
-import org.apache.nutch.fs.NFSDataOutputStream;
+import org.apache.hadoop.fs.NutchFileSystem;
+import org.apache.hadoop.fs.NFSDataOutputStream;
 
-import org.apache.nutch.io.WritableComparable;
-import org.apache.nutch.io.Writable;
+import org.apache.hadoop.io.WritableComparable;
+import org.apache.hadoop.io.Writable;
 
 public class TextOutputFormat implements OutputFormat {
 

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/demo/Grep.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/demo/Grep.java?rev=374733&r1=374710&r2=374733&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/demo/Grep.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/demo/Grep.java Fri Feb  3 11:45:32 2006
@@ -13,24 +13,24 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.nutch.mapred.demo;
+package org.apache.hadoop.mapred.demo;
 
-import org.apache.nutch.mapred.JobConf;
-import org.apache.nutch.mapred.JobClient;
-import org.apache.nutch.mapred.RunningJob;
-import org.apache.nutch.mapred.SequenceFileOutputFormat;
-import org.apache.nutch.mapred.SequenceFileInputFormat;
-
-import org.apache.nutch.mapred.lib.RegexMapper;
-import org.apache.nutch.mapred.lib.InverseMapper;
-import org.apache.nutch.mapred.lib.LongSumReducer;
-import org.apache.nutch.mapred.lib.IdentityReducer;
-
-import org.apache.nutch.io.UTF8;
-import org.apache.nutch.io.LongWritable;
-import org.apache.nutch.io.WritableComparator;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.JobClient;
+import org.apache.hadoop.mapred.RunningJob;
+import org.apache.hadoop.mapred.SequenceFileOutputFormat;
+import org.apache.hadoop.mapred.SequenceFileInputFormat;
+
+import org.apache.hadoop.mapred.lib.RegexMapper;
+import org.apache.hadoop.mapred.lib.InverseMapper;
+import org.apache.hadoop.mapred.lib.LongSumReducer;
+import org.apache.hadoop.mapred.lib.IdentityReducer;
+
+import org.apache.hadoop.io.UTF8;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.WritableComparator;
 
-import org.apache.nutch.util.NutchConf;
+import org.apache.hadoop.conf.Configuration;
 
 import java.io.File;
 import java.util.Random;
@@ -45,7 +45,7 @@
       System.exit(-1);
     }
 
-    NutchConf defaults = new NutchConf();
+    Configuration defaults = new Configuration();
 
     File tempDir =
       new File("grep-temp-"+

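The rewritten import block already outlines the Grep demo: RegexMapper plus LongSumReducer count the matches, and a second pass with InverseMapper and IdentityReducer orders them. A rough, assumption-heavy sketch of the counting job follows; every JobConf setter, the regex property name, and JobClient.runJob below are guesses at the era's API, not calls shown in this diff:

    import java.io.File;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.io.LongWritable;
    import org.apache.hadoop.io.UTF8;
    import org.apache.hadoop.mapred.JobClient;
    import org.apache.hadoop.mapred.JobConf;
    import org.apache.hadoop.mapred.SequenceFileOutputFormat;
    import org.apache.hadoop.mapred.lib.LongSumReducer;
    import org.apache.hadoop.mapred.lib.RegexMapper;

    // Assumption-heavy outline of the demo's first (counting) job.
    public class GrepOutline {
      public static void main(String[] args) throws Exception {
        Configuration defaults = new Configuration();
        JobConf grepJob = new JobConf(defaults);
        grepJob.setInputDir(new File(args[0]));             // text to search (setter assumed)
        grepJob.setMapperClass(RegexMapper.class);          // emits (match, 1)
        grepJob.set("mapred.mapper.regex", args[2]);        // property name assumed
        grepJob.setCombinerClass(LongSumReducer.class);     // setter assumed
        grepJob.setReducerClass(LongSumReducer.class);
        grepJob.setOutputDir(new File(args[1]));            // setter assumed
        grepJob.setOutputFormat(SequenceFileOutputFormat.class);
        grepJob.setOutputKeyClass(UTF8.class);
        grepJob.setOutputValueClass(LongWritable.class);
        JobClient.runJob(grepJob);                          // entry point assumed
      }
    }
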
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/lib/HashPartitioner.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/lib/HashPartitioner.java?rev=374733&r1=374710&r2=374733&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/lib/HashPartitioner.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/lib/HashPartitioner.java Fri Feb  3 11:45:32 2006
@@ -14,13 +14,13 @@
  * limitations under the License.
  */
 
-package org.apache.nutch.mapred.lib;
+package org.apache.hadoop.mapred.lib;
 
-import org.apache.nutch.mapred.Partitioner;
-import org.apache.nutch.mapred.JobConf;
+import org.apache.hadoop.mapred.Partitioner;
+import org.apache.hadoop.mapred.JobConf;
 
-import org.apache.nutch.io.WritableComparable;
-import org.apache.nutch.io.Writable;
+import org.apache.hadoop.io.WritableComparable;
+import org.apache.hadoop.io.Writable;
 
 /** Partition keys by their {@link Object#hashCode()}. */
 public class HashPartitioner implements Partitioner {

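HashPartitioner's body is outside the hunk, but partitioning by hashCode() conventionally comes down to one masked modulo. A sketch of what the implementation presumably does (the class body shown here is an assumption, not copied from the commit):

    import org.apache.hadoop.io.Writable;
    import org.apache.hadoop.io.WritableComparable;
    import org.apache.hadoop.mapred.JobConf;
    import org.apache.hadoop.mapred.Partitioner;

    // Conventional hash-partitioning rule, reconstructed as a sketch.
    public class HashPartitionerSketch implements Partitioner {
      public void configure(JobConf job) {}            // JobConfigurable hook (name assumed)

      public int getPartition(WritableComparable key, Writable value,
                              int numReduceTasks) {    // signature assumed
        // Mask off the sign bit so a negative hashCode still yields 0..n-1.
        return (key.hashCode() & Integer.MAX_VALUE) % numReduceTasks;
      }
    }
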
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/lib/IdentityMapper.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/lib/IdentityMapper.java?rev=374733&r1=374710&r2=374733&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/lib/IdentityMapper.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/lib/IdentityMapper.java Fri Feb  3 11:45:32 2006
@@ -14,17 +14,17 @@
  * limitations under the License.
  */
 
-package org.apache.nutch.mapred.lib;
+package org.apache.hadoop.mapred.lib;
 
 import java.io.IOException;
 
-import org.apache.nutch.mapred.Mapper;
-import org.apache.nutch.mapred.OutputCollector;
-import org.apache.nutch.mapred.JobConf;
-import org.apache.nutch.mapred.Reporter;
+import org.apache.hadoop.mapred.Mapper;
+import org.apache.hadoop.mapred.OutputCollector;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.Reporter;
 
-import org.apache.nutch.io.Writable;
-import org.apache.nutch.io.WritableComparable;
+import org.apache.hadoop.io.Writable;
+import org.apache.hadoop.io.WritableComparable;
 
 /** Implements the identity function, mapping inputs directly to outputs. */
 public class IdentityMapper implements Mapper {
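IdentityMapper's body is likewise not shown; the identity function described in the javadoc amounts to a single collect() call. A hedged reconstruction, with the map() signature assumed as in the earlier sketches:

    import java.io.IOException;

    import org.apache.hadoop.io.Writable;
    import org.apache.hadoop.io.WritableComparable;
    import org.apache.hadoop.mapred.JobConf;
    import org.apache.hadoop.mapred.Mapper;
    import org.apache.hadoop.mapred.OutputCollector;
    import org.apache.hadoop.mapred.Reporter;

    // Assumed reconstruction: pass every (key, value) pair through unchanged.
    public class IdentityMapperSketch implements Mapper {
      public void configure(JobConf job) {}

      public void map(WritableComparable key, Writable value,
                      OutputCollector output, Reporter reporter)
          throws IOException {
        output.collect(key, value);
      }
    }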