Posted to commits@hbase.apache.org by en...@apache.org on 2013/08/02 23:41:01 UTC

svn commit: r1509873 - in /hbase/branches/0.94/src/test/java/org/apache/hadoop/hbase: HBaseTestingUtility.java mapreduce/MapreduceTestingShim.java

Author: enis
Date: Fri Aug  2 21:41:01 2013
New Revision: 1509873

URL: http://svn.apache.org/r1509873
Log:
HBASE-9075 [0.94] Backport HBASE-5760 Unit tests should write only under /target to 0.94 (addendum patch to fix Hadoop2 build)

Modified:
    hbase/branches/0.94/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
    hbase/branches/0.94/src/test/java/org/apache/hadoop/hbase/mapreduce/MapreduceTestingShim.java

Modified: hbase/branches/0.94/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.94/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java?rev=1509873&r1=1509872&r2=1509873&view=diff
==============================================================================
--- hbase/branches/0.94/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java (original)
+++ hbase/branches/0.94/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java Fri Aug  2 21:41:01 2013
@@ -62,6 +62,7 @@ import org.apache.hadoop.hbase.io.hfile.
 import org.apache.hadoop.hbase.io.hfile.Compression;
 import org.apache.hadoop.hbase.io.hfile.Compression.Algorithm;
 import org.apache.hadoop.hbase.io.hfile.HFile;
+import org.apache.hadoop.hbase.mapreduce.MapreduceTestingShim;
 import org.apache.hadoop.hbase.master.HMaster;
 import org.apache.hadoop.hbase.master.ServerManager;
 import org.apache.hadoop.hbase.regionserver.HRegion;
@@ -1439,8 +1440,14 @@ public class HBaseTestingUtility {
 
     mrCluster = new MiniMRCluster(0, 0, servers,
       FileSystem.get(conf).getUri().toString(), 1, null, null, null, new JobConf(conf));
-    mrCluster.getJobTrackerRunner().getJobTracker().getConf().set("mapred.local.dir",
+
+    JobConf jobConf = MapreduceTestingShim.getJobConf(mrCluster);
+    if (jobConf == null) {
+      jobConf = mrCluster.createJobConf();
+    }
+    jobConf.set("mapred.local.dir",
       conf.get("mapred.local.dir")); //Hadoop MiniMR overwrites this while it should not
+
     LOG.info("Mini mapreduce cluster started");
     JobConf mrClusterJobConf = mrCluster.createJobConf();
     c.set("mapred.job.tracker", mrClusterJobConf.get("mapred.job.tracker"));

Modified: hbase/branches/0.94/src/test/java/org/apache/hadoop/hbase/mapreduce/MapreduceTestingShim.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.94/src/test/java/org/apache/hadoop/hbase/mapreduce/MapreduceTestingShim.java?rev=1509873&r1=1509872&r2=1509873&view=diff
==============================================================================
--- hbase/branches/0.94/src/test/java/org/apache/hadoop/hbase/mapreduce/MapreduceTestingShim.java (original)
+++ hbase/branches/0.94/src/test/java/org/apache/hadoop/hbase/mapreduce/MapreduceTestingShim.java Fri Aug  2 21:41:01 2013
@@ -19,9 +19,12 @@ package org.apache.hadoop.hbase.mapreduc
 
 import java.io.IOException;
 import java.lang.reflect.Constructor;
+import java.lang.reflect.InvocationTargetException;
 import java.lang.reflect.Method;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.MiniMRCluster;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.JobContext;
 import org.apache.hadoop.mapreduce.JobID;
@@ -29,11 +32,12 @@ import org.apache.hadoop.mapreduce.JobID
 /**
  * This class provides shims for HBase to interact with the Hadoop 1.0.x and the
  * Hadoop 0.23.x series.
- * 
+ *
  * NOTE: No testing done against 0.22.x, or 0.21.x.
  */
 abstract public class MapreduceTestingShim {
   private static MapreduceTestingShim instance;
+  private static Class[] emptyParam = new Class[] {};
 
   static {
     try {
@@ -49,11 +53,17 @@ abstract public class MapreduceTestingSh
   abstract public JobContext newJobContext(Configuration jobConf)
       throws IOException;
 
+  abstract public JobConf obtainJobConf(MiniMRCluster cluster);
+
   public static JobContext createJobContext(Configuration jobConf)
       throws IOException {
     return instance.newJobContext(jobConf);
   }
 
+  public static JobConf getJobConf(MiniMRCluster cluster) {
+    return instance.obtainJobConf(cluster);
+  }
+
   private static class MapreduceV1Shim extends MapreduceTestingShim {
     public JobContext newJobContext(Configuration jobConf) throws IOException {
       // Implementing:
@@ -68,6 +78,22 @@ abstract public class MapreduceTestingSh
             "Failed to instantiate new JobContext(jobConf, new JobID())", e);
       }
     }
+    public JobConf obtainJobConf(MiniMRCluster cluster) {
+      if (cluster == null) return null;
+      try {
+        Object runner = cluster.getJobTrackerRunner();
+        Method meth = runner.getClass().getDeclaredMethod("getJobTracker", emptyParam);
+        Object tracker = meth.invoke(runner, new Object []{});
+        Method m = tracker.getClass().getDeclaredMethod("getConf", emptyParam);
+        return (JobConf) m.invoke(tracker, new Object []{});
+      } catch (NoSuchMethodException nsme) {
+        return null;
+      } catch (InvocationTargetException ite) {
+        return null;
+      } catch (IllegalAccessException iae) {
+        return null;
+      }
+    }
   };
 
   private static class MapreduceV2Shim extends MapreduceTestingShim {
@@ -83,6 +109,18 @@ abstract public class MapreduceTestingSh
             "Failed to return from Job.getInstance(jobConf)");
       }
     }
+    public JobConf obtainJobConf(MiniMRCluster cluster) {
+      try {
+        Method meth = MiniMRCluster.class.getMethod("getJobTrackerConf", emptyParam);
+        return (JobConf) meth.invoke(cluster, new Object []{});
+      } catch (NoSuchMethodException nsme) {
+        return null;
+      } catch (InvocationTargetException ite) {
+        return null;
+      } catch (IllegalAccessException iae) {
+        return null;
+      }
+    }
   };
 
 }
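
Both obtainJobConf() implementations above go through reflection rather than direct calls so that this single source file compiles against either Hadoop line: the getJobTrackerRunner()/getJobTracker()/getConf() chain and the getJobTrackerConf() accessor presumably each exist on only one of the two MiniMRCluster variants, so a direct call to either would fail to compile against the other. The pattern is: look the method up by name, invoke it, and return null on any reflective failure so the caller can fall back to another path. A minimal standalone sketch of that pattern; the class and method names below are placeholders for illustration, not Hadoop or HBase APIs:

    import java.lang.reflect.InvocationTargetException;
    import java.lang.reflect.Method;

    // Placeholder class, not part of HBase: illustrates the "reflect, invoke,
    // return null on failure" convention used by obtainJobConf() above.
    public class ReflectiveAccessorSketch {

      /** Invokes a public no-argument method by name; null means "not available here". */
      static Object invokeIfPresent(Object target, String methodName) {
        if (target == null) return null;
        try {
          Method m = target.getClass().getMethod(methodName, new Class[] {});
          return m.invoke(target, new Object[] {});
        } catch (NoSuchMethodException e) {
          return null;   // this Hadoop version does not expose the method
        } catch (InvocationTargetException e) {
          return null;   // the method itself threw; treat as unavailable
        } catch (IllegalAccessException e) {
          return null;   // not callable from here
        }
      }
    }

Under this contract a caller only needs a null check, which is exactly what the patched startMiniMapReduceCluster() does before falling back to mrCluster.createJobConf().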