Posted to common-commits@hadoop.apache.org by ab...@apache.org on 2006/12/06 10:56:24 UTC

svn commit: r482999 - in /lucene/hadoop/trunk: CHANGES.txt src/java/org/apache/hadoop/io/MapFile.java src/java/org/apache/hadoop/ipc/Client.java src/java/org/apache/hadoop/ipc/Server.java src/java/org/apache/hadoop/mapred/ReduceTask.java

Author: ab
Date: Wed Dec  6 01:56:23 2006
New Revision: 482999

URL: http://svn.apache.org/viewvc?view=rev&rev=482999
Log:
HADOOP-780 - Use ReflectionUtils to instantiate key and value objects.

Modified:
    lucene/hadoop/trunk/CHANGES.txt
    lucene/hadoop/trunk/src/java/org/apache/hadoop/io/MapFile.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/ipc/Client.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/ipc/Server.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/ReduceTask.java

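For context, a minimal sketch of the pattern this change adopts: direct no-arg reflection on the key/value classes is replaced by ReflectionUtils.newInstance(clazz, conf), which also hands the Configuration to the new object. This is illustrative only; the Text key class and the standalone main() are assumptions, not part of the commit.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.io.Writable;
    import org.apache.hadoop.util.ReflectionUtils;

    public class NewInstanceSketch {
      public static void main(String[] args) throws Exception {
        Class keyClass = Text.class;               // example key class
        Configuration conf = new Configuration();

        // Before: raw reflection; checked exceptions must be handled by the
        // caller, and setConf() on Configurable values is easy to forget.
        Writable before = (Writable) keyClass.getConstructor(new Class[0])
            .newInstance(new Object[0]);

        // After: one call that constructs the object and passes the
        // Configuration along.
        Writable after = (Writable) ReflectionUtils.newInstance(keyClass, conf);

        System.out.println(before.getClass() + " / " + after.getClass());
      }
    }
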
Modified: lucene/hadoop/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/CHANGES.txt?view=diff&rev=482999&r1=482998&r2=482999
==============================================================================
--- lucene/hadoop/trunk/CHANGES.txt (original)
+++ lucene/hadoop/trunk/CHANGES.txt Wed Dec  6 01:56:23 2006
@@ -1,5 +1,10 @@
 Hadoop Change Log
 
+Trunk (unreleased changes)
+
+ 1. HADOOP-780. Use ReflectionUtils to instantiate key and value
+    objects. (ab)
+
 
 Release 0.9.0 - 2006-12-01
 

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/io/MapFile.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/io/MapFile.java?view=diff&rev=482999&r1=482998&r2=482999
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/io/MapFile.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/io/MapFile.java Wed Dec  6 01:56:23 2006
@@ -22,6 +22,7 @@
 import org.apache.hadoop.fs.*;
 import org.apache.hadoop.conf.*;
 import org.apache.hadoop.util.Progressable;
+import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.hadoop.io.SequenceFile.CompressionType;
 
 /** A file-based map from keys to values.
@@ -470,8 +471,8 @@
               ", got " + dataReader.getValueClass().getName());
     }
     long cnt = 0L;
-    Writable key = (Writable)keyClass.getConstructor(new Class[0]).newInstance(new Object[0]);
-    Writable value = (Writable)valueClass.getConstructor(new Class[0]).newInstance(new Object[0]);
+    Writable key = (Writable)ReflectionUtils.newInstance(keyClass, conf);
+    Writable value = (Writable)ReflectionUtils.newInstance(valueClass, conf);
     SequenceFile.Writer indexWriter = null;
     if (!dryrun) indexWriter = SequenceFile.createWriter(fs, conf, index, keyClass, LongWritable.class);
     try {
@@ -510,11 +511,11 @@
     FileSystem fs = new LocalFileSystem(conf);
     MapFile.Reader reader = new MapFile.Reader(fs, in, conf);
     MapFile.Writer writer =
-      new MapFile.Writer(fs, out, reader.getKeyClass(), reader.getValueClass());
+      new MapFile.Writer(conf, fs, out, reader.getKeyClass(), reader.getValueClass());
 
     WritableComparable key =
-      (WritableComparable)reader.getKeyClass().newInstance();
-    Writable value = (Writable)reader.getValueClass().newInstance();
+      (WritableComparable)ReflectionUtils.newInstance(reader.getKeyClass(), conf);
+    Writable value = (Writable)ReflectionUtils.newInstance(reader.getValueClass(), conf);
 
     while (reader.next(key, value))               // copy all entries
       writer.append(key, value);

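As a usage note, the second hunk switches MapFile.main() to the Writer constructor that takes the Configuration first, so the copied key/value instances can be created through ReflectionUtils with that Configuration. A hedged sketch of calling that constructor directly; the directory name and the Text/IntWritable classes are placeholders for illustration, and the LocalFileSystem construction simply mirrors the main() code above.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.LocalFileSystem;
    import org.apache.hadoop.io.IntWritable;
    import org.apache.hadoop.io.MapFile;
    import org.apache.hadoop.io.Text;

    public class MapFileWriterSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        FileSystem fs = new LocalFileSystem(conf);   // as in MapFile.main() above

        // Writer variant with the Configuration as the first argument, as in
        // the diff; "example.map" is a placeholder directory name.
        MapFile.Writer writer =
          new MapFile.Writer(conf, fs, "example.map", Text.class, IntWritable.class);
        writer.append(new Text("key"), new IntWritable(1));
        writer.close();
      }
    }
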
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/ipc/Client.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/ipc/Client.java?view=diff&rev=482999&r1=482998&r2=482999
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/ipc/Client.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/ipc/Client.java Wed Dec  6 01:56:23 2006
@@ -44,6 +44,7 @@
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableUtils;
 import org.apache.hadoop.io.DataOutputBuffer;
+import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.hadoop.util.StringUtils;
 
 /** A client for an IPC service.  IPC calls take a single {@link Writable} as a
@@ -259,12 +260,9 @@
                                   WritableUtils.readString(in));
             call.setResult(null, ex);
           } else {
-            Writable value = makeValue();
+            Writable value = (Writable)ReflectionUtils.newInstance(valueClass, conf);
             try {
               readingCall = call;
-              if(value instanceof Configurable) {
-                ((Configurable) value).setConf(conf);
-              }
               value.readFields(in);                 // read value
             } finally {
               readingCall = null;
@@ -526,18 +524,6 @@
     //entire system down.
     connection.setupIOstreams();
     return connection;
-  }
-
-  private Writable makeValue() {
-    Writable value;                             // construct value
-    try {
-      value = (Writable)valueClass.newInstance();
-    } catch (InstantiationException e) {
-      throw new RuntimeException(e.toString());
-    } catch (IllegalAccessException e) {
-      throw new RuntimeException(e.toString());
-    }
-    return value;
   }
 
 }

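The deleted makeValue() helper and the explicit Configurable check above are not lost behavior: ReflectionUtils.newInstance is understood to construct the object and call setConf() itself when the instance is Configurable. A rough, simplified sketch of that assumed behavior (not the actual Hadoop source):

    import org.apache.hadoop.conf.Configurable;
    import org.apache.hadoop.conf.Configuration;

    public class NewInstanceBehaviorSketch {
      // Simplified stand-in for ReflectionUtils.newInstance(Class, Configuration).
      public static Object newInstance(Class theClass, Configuration conf) {
        Object result;
        try {
          result = theClass.newInstance();          // no-arg construction
        } catch (Exception e) {
          throw new RuntimeException(e);            // unchecked, like makeValue() was
        }
        if (result instanceof Configurable) {       // the step Client and Server no
          ((Configurable) result).setConf(conf);    // longer need to do by hand
        }
        return result;
      }
    }
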
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/ipc/Server.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/ipc/Server.java?view=diff&rev=482999&r1=482998&r2=482999
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/ipc/Server.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/ipc/Server.java Wed Dec  6 01:56:23 2006
@@ -460,7 +460,7 @@
       if (LOG.isDebugEnabled())
         LOG.debug(" got #" + id);
             
-      Writable param = makeParam();           // read param
+      Writable param = (Writable)ReflectionUtils.newInstance(paramClass, conf);           // read param
       param.readFields(dis);        
         
       Call call = new Call(id, param, this);
@@ -633,21 +633,5 @@
 
   /** Called for each call. */
   public abstract Writable call(Writable param) throws IOException;
-
   
-  private Writable makeParam() {
-    Writable param;                               // construct param
-    try {
-      param = (Writable)paramClass.newInstance();
-      if (param instanceof Configurable) {
-        ((Configurable)param).setConf(conf);
-      }
-    } catch (InstantiationException e) {
-      throw new RuntimeException(e.toString());
-    } catch (IllegalAccessException e) {
-      throw new RuntimeException(e.toString());
-    }
-    return param;
-  }
-
 }

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/ReduceTask.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/ReduceTask.java?view=diff&rev=482999&r1=482998&r2=482999
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/ReduceTask.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/ReduceTask.java Wed Dec  6 01:56:23 2006
@@ -131,16 +131,19 @@
     private WritableComparator comparator;
     private Class keyClass;
     private Class valClass;
+    private Configuration conf;
     private DataOutputBuffer valOut = new DataOutputBuffer();
     private DataInputBuffer valIn = new DataInputBuffer();
     private DataInputBuffer keyIn = new DataInputBuffer();
 
     public ValuesIterator (SequenceFile.Sorter.RawKeyValueIterator in, 
                            WritableComparator comparator, Class keyClass,
-                           Class valClass, TaskUmbilicalProtocol umbilical)
+                           Class valClass, TaskUmbilicalProtocol umbilical,
+                           Configuration conf)
       throws IOException {
       this.in = in;
       this.umbilical = umbilical;
+      this.conf = conf;
       this.comparator = comparator;
       this.keyClass = keyClass;
       this.valClass = valClass;
@@ -183,8 +186,8 @@
 
       Writable lastKey = key;                     // save previous key
       try {
-        key = (WritableComparable)keyClass.newInstance();
-        value = (Writable)valClass.newInstance();
+        key = (WritableComparable)ReflectionUtils.newInstance(keyClass, this.conf);
+        value = (Writable)ReflectionUtils.newInstance(valClass, this.conf);
       } catch (Exception e) {
         throw new RuntimeException(e);
       }
@@ -298,7 +301,7 @@
       Class keyClass = job.getMapOutputKeyClass();
       Class valClass = job.getMapOutputValueClass();
       ValuesIterator values = new ValuesIterator(rIter, comparator, keyClass, 
-                                                 valClass, umbilical);
+                                                 valClass, umbilical, job);
       while (values.more()) {
         myMetrics.reduceInput();
         reducer.reduce(values.getKey(), values, collector, reporter);