Posted to commits@mahout.apache.org by sr...@apache.org on 2013/09/24 14:05:33 UTC

svn commit: r1525865 - in /mahout/trunk/integration/src/test/java/org/apache/mahout/text: LuceneSegmentInputFormatTest.java LuceneSegmentRecordReaderTest.java

Author: srowen
Date: Tue Sep 24 12:05:33 2013
New Revision: 1525865

URL: http://svn.apache.org/r1525865
Log:
MAHOUT-1340 fix two tests to work with Hadoop 2 as well

Modified:
    mahout/trunk/integration/src/test/java/org/apache/mahout/text/LuceneSegmentInputFormatTest.java
    mahout/trunk/integration/src/test/java/org/apache/mahout/text/LuceneSegmentRecordReaderTest.java
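
For context, the incompatibility is that org.apache.hadoop.mapreduce.JobContext and TaskAttemptContext are concrete classes in Hadoop 1 but interfaces in Hadoop 2, where the instantiable implementations moved to org.apache.hadoop.mapreduce.task.JobContextImpl and TaskAttemptContextImpl. A direct "new JobContext(conf, jobId)" therefore compiles against Hadoop 1 only. As a minimal sketch (not part of the commit), this is the equivalent written directly against the Hadoop 2 API; it does not compile against Hadoop 1:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.mapreduce.JobContext;
    import org.apache.hadoop.mapreduce.JobID;
    import org.apache.hadoop.mapreduce.task.JobContextImpl;

    public class Hadoop2DirectContext {
      public static void main(String[] args) {
        // Hadoop 2 only: JobContext is an interface here, so the concrete
        // JobContextImpl must be named explicitly at compile time.
        JobContext jobContext = new JobContextImpl(new Configuration(), new JobID());
        System.out.println(jobContext.getJobID());
      }
    }

The commit below sidesteps naming either concrete type at compile time by selecting it reflectively at runtime.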

Modified: mahout/trunk/integration/src/test/java/org/apache/mahout/text/LuceneSegmentInputFormatTest.java
URL: http://svn.apache.org/viewvc/mahout/trunk/integration/src/test/java/org/apache/mahout/text/LuceneSegmentInputFormatTest.java?rev=1525865&r1=1525864&r2=1525865&view=diff
==============================================================================
--- mahout/trunk/integration/src/test/java/org/apache/mahout/text/LuceneSegmentInputFormatTest.java (original)
+++ mahout/trunk/integration/src/test/java/org/apache/mahout/text/LuceneSegmentInputFormatTest.java Tue Sep 24 12:05:33 2013
@@ -28,6 +28,7 @@ import org.junit.Before;
 import org.junit.Test;
 
 import java.io.IOException;
+import java.lang.reflect.InvocationTargetException;
 import java.util.Collections;
 import java.util.List;
 
@@ -38,12 +39,12 @@ public class LuceneSegmentInputFormatTes
   private Configuration conf;
 
   @Before
-  public void before() throws IOException {
+  public void before() throws Exception {
     inputFormat = new LuceneSegmentInputFormat();
     LuceneStorageConfiguration lucene2SeqConf = new LuceneStorageConfiguration(new Configuration(), Collections.singletonList(indexPath1), new Path("output"), "id", Collections.singletonList("field"));
     conf = lucene2SeqConf.serialize();
 
-    jobContext = new JobContext(conf, new JobID());
+    jobContext = getJobContext(conf, new JobID());
   }
 
   @After
@@ -65,4 +66,19 @@ public class LuceneSegmentInputFormatTes
     List<LuceneSegmentInputSplit> splits = inputFormat.getSplits(jobContext);
     Assert.assertEquals(3, splits.size());
   }
+
+  // Use reflection to bridge this incompatibility between the Hadoop 1 and 2 APIs: JobContext is a concrete class in Hadoop 1 but an interface in Hadoop 2.
+  private JobContext getJobContext(Configuration conf, JobID jobID) throws
+      ClassNotFoundException, NoSuchMethodException, IllegalAccessException,
+      InvocationTargetException, InstantiationException {
+    Class<? extends JobContext> clazz;
+    if (!JobContext.class.isInterface()) {
+      clazz = JobContext.class;
+    } else {
+      clazz = (Class<? extends JobContext>)
+          Class.forName("org.apache.hadoop.mapreduce.task.JobContextImpl");
+    }
+    return clazz.getConstructor(Configuration.class, JobID.class)
+        .newInstance(conf, jobID);
+  }
 }
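
The same reflective pattern is duplicated in both tests (see the second file below). As a sketch of how the two helpers could be unified (hypothetical code, not part of the commit; HadoopCompat and newContext are invented names), Class.asSubclass also avoids the unchecked cast:

    import java.lang.reflect.InvocationTargetException;

    final class HadoopCompat {
      private HadoopCompat() {}

      // Given an API type that is a concrete class in Hadoop 1 but an interface
      // in Hadoop 2, construct an instance of whichever form the classpath provides.
      static <T> T newContext(Class<T> apiClass, String hadoop2ImplName,
                              Class<?>[] argTypes, Object... args)
          throws ClassNotFoundException, NoSuchMethodException, IllegalAccessException,
                 InvocationTargetException, InstantiationException {
        Class<? extends T> clazz = apiClass.isInterface()
            ? Class.forName(hadoop2ImplName).asSubclass(apiClass)  // Hadoop 2
            : apiClass;                                            // Hadoop 1
        return clazz.getConstructor(argTypes).newInstance(args);
      }
    }

A test could then build its context with, for example:

    JobContext jobContext = HadoopCompat.newContext(
        JobContext.class, "org.apache.hadoop.mapreduce.task.JobContextImpl",
        new Class<?>[] {Configuration.class, JobID.class}, conf, new JobID());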

Modified: mahout/trunk/integration/src/test/java/org/apache/mahout/text/LuceneSegmentRecordReaderTest.java
URL: http://svn.apache.org/viewvc/mahout/trunk/integration/src/test/java/org/apache/mahout/text/LuceneSegmentRecordReaderTest.java?rev=1525865&r1=1525864&r2=1525865&view=diff
==============================================================================
--- mahout/trunk/integration/src/test/java/org/apache/mahout/text/LuceneSegmentRecordReaderTest.java (original)
+++ mahout/trunk/integration/src/test/java/org/apache/mahout/text/LuceneSegmentRecordReaderTest.java Tue Sep 24 12:05:33 2013
@@ -29,6 +29,7 @@ import org.junit.Before;
 import org.junit.Test;
 
 import java.io.IOException;
+import java.lang.reflect.InvocationTargetException;
 
 import static java.util.Arrays.asList;
 
@@ -58,7 +59,7 @@ public class LuceneSegmentRecordReaderTe
     for (SegmentInfoPerCommit segmentInfo : segmentInfos) {
       int docId = 0;
       LuceneSegmentInputSplit inputSplit = new LuceneSegmentInputSplit(getIndexPath1(), segmentInfo.info.name, segmentInfo.sizeInBytes());
-      TaskAttemptContext context = new TaskAttemptContext(configuration, new TaskAttemptID());
+      TaskAttemptContext context = getTaskAttemptContext(configuration, new TaskAttemptID());
       recordReader.initialize(inputSplit, context);
       for (int i = 0; i < 500; i++){
         recordReader.nextKeyValue();
@@ -69,4 +70,19 @@ public class LuceneSegmentRecordReaderTe
       }
     }
   }
+
+  // Use reflection to bridge this incompatibility between the Hadoop 1 and 2 APIs: TaskAttemptContext is a concrete class in Hadoop 1 but an interface in Hadoop 2.
+  private TaskAttemptContext getTaskAttemptContext(Configuration conf, TaskAttemptID taskAttemptID) throws
+      ClassNotFoundException, NoSuchMethodException, IllegalAccessException,
+      InvocationTargetException, InstantiationException {
+    Class<? extends TaskAttemptContext> clazz;
+    if (!TaskAttemptContext.class.isInterface()) {
+      clazz = TaskAttemptContext.class;
+    } else {
+      clazz = (Class<? extends TaskAttemptContext>)
+          Class.forName("org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl");
+    }
+    return clazz.getConstructor(Configuration.class, TaskAttemptID.class)
+        .newInstance(conf, taskAttemptID);
+  }
 }
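
As a quick way to confirm which Hadoop generation a classpath provides, the same interface-vs-class check both helpers rely on can be run standalone (a hypothetical diagnostic, not part of the commit):

    import org.apache.hadoop.mapreduce.JobContext;
    import org.apache.hadoop.mapreduce.TaskAttemptContext;

    public class HadoopGenerationCheck {
      public static void main(String[] args) {
        // In Hadoop 2 both context types became interfaces; in Hadoop 1
        // they are concrete classes with public constructors.
        System.out.println("JobContext: "
            + (JobContext.class.isInterface() ? "interface (Hadoop 2)" : "class (Hadoop 1)"));
        System.out.println("TaskAttemptContext: "
            + (TaskAttemptContext.class.isInterface() ? "interface (Hadoop 2)" : "class (Hadoop 1)"));
      }
    }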