Posted to commits@hive.apache.org by ha...@apache.org on 2013/03/20 22:52:51 UTC

svn commit: r1459072 - in /hive/trunk: ./ ql/src/test/org/apache/hadoop/hive/ql/ shims/ shims/src/0.20/java/org/apache/hadoop/hive/shims/ shims/src/0.20S/java/org/apache/hadoop/hive/shims/ shims/src/0.23/java/org/apache/hadoop/hive/shims/ shims/src/com...

Author: hashutosh
Date: Wed Mar 20 21:52:51 2013
New Revision: 1459072

URL: http://svn.apache.org/r1459072
Log:
HIVE-4139 : MiniDFS shim does not work for hadoop 2 (Gunther Hagleitner via Ashutosh Chauhan)

Modified:
    hive/trunk/build-common.xml
    hive/trunk/build.properties
    hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java
    hive/trunk/shims/ivy.xml
    hive/trunk/shims/src/0.20/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java
    hive/trunk/shims/src/0.20S/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java
    hive/trunk/shims/src/0.23/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java
    hive/trunk/shims/src/common-secure/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java
    hive/trunk/shims/src/common/java/org/apache/hadoop/hive/shims/HadoopShims.java
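
For context: before this patch, QTestUtil instantiated
org.apache.hadoop.mapred.MiniMRCluster directly, which only links against
the Hadoop 1 class. The patch routes mini-cluster creation through the shim
layer instead, so each Hadoop line supplies its own wrapper behind the new
HadoopShims.MiniMrShim interface. A minimal sketch of the resulting call
pattern follows; the interface and factory methods are taken from the patch,
while the harness class itself is hypothetical.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hive.shims.HadoopShims;
    import org.apache.hadoop.hive.shims.ShimLoader;

    // Hypothetical smoke test: ShimLoader selects the 0.20/0.20S/0.23
    // implementation at runtime, so the same code runs on either Hadoop line.
    public class MiniClusterSmokeTest {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        HadoopShims shims = ShimLoader.getHadoopShims();

        HadoopShims.MiniDFSShim dfs = shims.getMiniDfs(conf, 4, true, null);
        HadoopShims.MiniMrShim mr = shims.getMiniMrCluster(
            conf, 4, dfs.getFileSystem().getUri().toString(), 1);

        // Each shim publishes its own job-launcher address(es) into conf;
        // see the per-version implementations further down in this commit.
        mr.setupConfiguration(conf);

        mr.shutdown();
        dfs.shutdown();
      }
    }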

Modified: hive/trunk/build-common.xml
URL: http://svn.apache.org/viewvc/hive/trunk/build-common.xml?rev=1459072&r1=1459071&r2=1459072&view=diff
==============================================================================
--- hive/trunk/build-common.xml (original)
+++ hive/trunk/build-common.xml Wed Mar 20 21:52:51 2013
@@ -463,7 +463,7 @@
       <sysproperty key="java.net.preferIPv4Stack" value="${java.net.preferIPv4Stack}"/>
       <sysproperty key="hadoop.bin.path" value="${test.hadoop.bin.path}${junit.script.extension}"/>
       <sysproperty key="test.concurrency.num.threads" value="${test.concurrency.num.threads}"/>
-      <jvmarg line="${jvm.args}"/>
+      <jvmarg line="${junit.jvm.args}"/>
 
       <classpath refid="test.local.classpath"/>
       <formatter type="${test.junit.output.format}" usefile="${test.junit.output.usefile}" />

Modified: hive/trunk/build.properties
URL: http://svn.apache.org/viewvc/hive/trunk/build.properties?rev=1459072&r1=1459071&r2=1459072&view=diff
==============================================================================
--- hive/trunk/build.properties (original)
+++ hive/trunk/build.properties Wed Mar 20 21:52:51 2013
@@ -141,6 +141,9 @@ datanucleus.repo=http://www.datanucleus.
 # JVM arguments
 jvm.args=-XX:-UseSplitVerifier
 
+# junit jvm args
+junit.jvm.args=-XX:-UseSplitVerifier -XX:+CMSClassUnloadingEnabled -XX:MaxPermSize=128M
+
 #
 # Eclipse Properties
 #
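
The two extra flags for the forked JUnit JVM matter because the tests now
start mini MR and DFS clusters in-process: each fork loads a large number of
classes, and on the permanent-generation JVMs of this era a long test run
could exhaust PermGen unless class unloading was enabled under CMS and the
cap raised. To confirm the forked JVM actually received these arguments, a
small check such as the following (a hypothetical helper, not part of the
patch) can be run under the same <junit> task:

    import java.lang.management.ManagementFactory;

    // Prints the arguments the current (forked) JVM was started with, so you
    // can verify that ${junit.jvm.args} survived Ant's <jvmarg line="..."/>.
    public class PrintJvmArgs {
      public static void main(String[] args) {
        for (String a : ManagementFactory.getRuntimeMXBean().getInputArguments()) {
          System.out.println(a);
        }
      }
    }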

Modified: hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java?rev=1459072&r1=1459071&r2=1459072&view=diff
==============================================================================
--- hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java (original)
+++ hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java Wed Mar 20 21:52:51 2013
@@ -46,7 +46,6 @@ import java.util.TreeMap;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
-import org.apache.commons.lang.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.FileStatus;
@@ -79,7 +78,6 @@ import org.apache.hadoop.hive.serde2.thr
 import org.apache.hadoop.hive.serde2.thrift.test.Complex;
 import org.apache.hadoop.hive.shims.HadoopShims;
 import org.apache.hadoop.hive.shims.ShimLoader;
-import org.apache.hadoop.mapred.MiniMRCluster;
 import org.apache.hadoop.mapred.SequenceFileInputFormat;
 import org.apache.hadoop.mapred.SequenceFileOutputFormat;
 import org.apache.hadoop.mapred.TextInputFormat;
@@ -117,7 +115,7 @@ public class QTestUtil {
   private FileSystem fs;
   protected final boolean overWrite;
   private CliDriver cliDriver;
-  private MiniMRCluster mr = null;
+  private HadoopShims.MiniMrShim mr = null;
   private HadoopShims.MiniDFSShim dfs = null;
   private boolean miniMr = false;
   private String hadoopVer = null;
@@ -222,6 +220,9 @@ public class QTestUtil {
     if (miniMr) {
       assert dfs != null;
       assert mr != null;
+
+      mr.setupConfiguration(conf);
+
       // set fs.default.name to the uri of mini-dfs
       String dfsUriString = getHdfsUriString(dfs.getFileSystem().getUri().toString());
       conf.setVar(HiveConf.ConfVars.HADOOPFS, dfsUriString);
@@ -229,25 +230,6 @@ public class QTestUtil {
       conf.setVar(HiveConf.ConfVars.METASTOREWAREHOUSE,
                   (new Path(dfsUriString,
                             "/build/ql/test/data/warehouse/")).toString());
-      int port = 0;
-
-      try {
-        // Hadoop20 MiniMRCluster will return a proper port.
-        // Hadoop23 MiniMRCluster does not implement this method so use the default RM port.
-        port = mr.getJobTrackerPort();
-      } catch (UnsupportedOperationException e) {
-        String address =
-            StringUtils.substringAfterLast(conf.get("yarn.resourcemanager.address"), ":");
-
-        if (StringUtils.isBlank(address)) {
-          throw new IllegalArgumentException("Invalid YARN resource manager port.");
-        }
-
-        port = Integer.parseInt(address);
-      }
-
-      ShimLoader.getHadoopShims().setJobLauncherRpcAddress(conf,
-              "localhost:" + port);
     }
   }
 
@@ -299,7 +281,7 @@ public class QTestUtil {
     if (miniMr) {
       dfs = ShimLoader.getHadoopShims().getMiniDfs(conf, 4, true, null);
       FileSystem fs = dfs.getFileSystem();
-      mr = new MiniMRCluster(4, getHdfsUriString(fs.getUri().toString()), 1);
+      mr = ShimLoader.getHadoopShims().getMiniMrCluster(conf, 4, getHdfsUriString(fs.getUri().toString()), 1);
     }
 
     initConf();
@@ -374,7 +356,7 @@ public class QTestUtil {
   private boolean checkNeedsSort(String fileName, String query) {
     Pattern pattern = Pattern.compile("-- SORT_BEFORE_DIFF");
     Matcher matcher = pattern.matcher(query);
-    
+
     if (matcher.find()) {
       return true;
     }
@@ -840,7 +822,7 @@ public class QTestUtil {
     outfd.write(e.getMessage());
     outfd.close();
 
-    int exitVal = executeDiffCommand(outf.getPath(), expf, false, 
+    int exitVal = executeDiffCommand(outf.getPath(), expf, false,
                                      qSortSet.contains(qf.getName()));
     if (exitVal != 0 && overWrite) {
       exitVal = overwriteResults(outf.getPath(), expf);
@@ -1061,7 +1043,7 @@ public class QTestUtil {
       ) throws Exception {
 
     int result = 0;
-    
+
     if (sortResults) {
       // sort will try to open the output file in write mode on windows. We need to
       // close it first.
@@ -1140,18 +1122,18 @@ public class QTestUtil {
   private static int executeCmd(String[] args, String outFile, String errFile) throws Exception {
     System.out.println("Running: " + org.apache.commons.lang.StringUtils.join(args, ' '));
 
-    PrintStream out = outFile == null ? 
-      SessionState.getConsole().getChildOutStream() : 
+    PrintStream out = outFile == null ?
+      SessionState.getConsole().getChildOutStream() :
       new PrintStream(new FileOutputStream(outFile), true);
-    PrintStream err = errFile == null ? 
-      SessionState.getConsole().getChildErrStream() : 
+    PrintStream err = errFile == null ?
+      SessionState.getConsole().getChildErrStream() :
       new PrintStream(new FileOutputStream(errFile), true);
 
     Process executor = Runtime.getRuntime().exec(args);
 
     StreamPrinter errPrinter = new StreamPrinter(executor.getErrorStream(), null, err);
     StreamPrinter outPrinter = new StreamPrinter(executor.getInputStream(), null, out);
-    
+
     outPrinter.start();
     errPrinter.start();
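
A note on the hunk deleted above: the old code special-cased Hadoop 2 by
catching the UnsupportedOperationException its MiniMRCluster throws from
getJobTrackerPort(), falling back to parsing yarn.resourcemanager.address,
and then pushing a single "localhost:" + port string into the conf. One
address is enough for an MR1 JobTracker but not for YARN, where the mini
cluster also hands out history-server and other endpoints on ephemeral
ports; delegating to mr.setupConfiguration(conf) lets each shim publish
whatever its Hadoop line needs.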
 

Modified: hive/trunk/shims/ivy.xml
URL: http://svn.apache.org/viewvc/hive/trunk/shims/ivy.xml?rev=1459072&r1=1459071&r2=1459072&view=diff
==============================================================================
--- hive/trunk/shims/ivy.xml (original)
+++ hive/trunk/shims/ivy.xml Wed Mar 20 21:52:51 2013
@@ -91,6 +91,36 @@
       <exclude org="org.apache.commons" module="commons-daemon"/><!--bad POM-->
     </dependency>
 
+    <!-- jobclient tests dependency -->
+    <dependency org="org.apache.hadoop" name="hadoop-mapreduce-client-jobclient" rev="${hadoop-0.23.version}"
+                conf="hadoop0.23.shim->default" transitive="false">
+      <artifact name="hadoop-mapreduce-client-jobclient" ext="jar" />
+      <artifact name="hadoop-mapreduce-client-jobclient" type="tests" ext="jar" m:classifier="tests"/>
+      <exclude org="commons-daemon" module="commons-daemon"/><!--bad POM-->
+      <exclude org="org.apache.commons" module="commons-daemon"/><!--bad POM-->
+    </dependency>
+    <dependency org="org.apache.hadoop" name="hadoop-yarn-server-tests"
+                rev="${hadoop-0.23.version}"
+                conf="hadoop0.23.shim->default">
+      <artifact name="hadoop-yarn-server-tests" type="tests" ext="jar" m:classifier="tests"/>
+      <exclude org="commons-daemon" module="commons-daemon"/><!--bad POM-->
+      <exclude org="org.apache.commons" module="commons-daemon"/><!--bad POM-->
+    </dependency>
+    <dependency org="org.apache.hadoop" name="hadoop-mapreduce-client-app"
+                rev="${hadoop-0.23.version}"
+                conf="hadoop0.23.shim->default">
+      <include type="jar"/>
+      <exclude org="commons-daemon" module="commons-daemon"/><!--bad POM-->
+      <exclude org="org.apache.commons" module="commons-daemon"/><!--bad POM-->
+    </dependency>
+    <dependency org="org.apache.hadoop" name="hadoop-mapreduce-client-hs"
+                rev="${hadoop-0.23.version}"
+                conf="hadoop0.23.shim->default">
+      <include type="jar"/>
+      <exclude org="commons-daemon" module="commons-daemon"/><!--bad POM-->
+      <exclude org="org.apache.commons" module="commons-daemon"/><!--bad POM-->
+    </dependency>
+
     <!-- Hadoop 0.20 shim dependencies. Used for building 0.20 shims. -->
     <dependency org="com.google.guava" name="guava" rev="${guava-hadoop20.version}"
                 conf="hadoop0.20.shim->default" transitive="false"/>

Modified: hive/trunk/shims/src/0.20/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java
URL: http://svn.apache.org/viewvc/hive/trunk/shims/src/0.20/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java?rev=1459072&r1=1459071&r2=1459072&view=diff
==============================================================================
--- hive/trunk/shims/src/0.20/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java (original)
+++ hive/trunk/shims/src/0.20/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java Wed Mar 20 21:52:51 2013
@@ -52,6 +52,7 @@ import org.apache.hadoop.mapred.InputSpl
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.JobContext;
 import org.apache.hadoop.mapred.JobStatus;
+import org.apache.hadoop.mapred.MiniMRCluster;
 import org.apache.hadoop.mapred.OutputCommitter;
 import org.apache.hadoop.mapred.RecordReader;
 import org.apache.hadoop.mapred.Reporter;
@@ -100,6 +101,43 @@ public class Hadoop20Shims implements Ha
     // gone in 20+
   }
 
+
+  /**
+   * Returns a shim to wrap MiniMrCluster
+   */
+  public MiniMrShim getMiniMrCluster(Configuration conf, int numberOfTaskTrackers,
+                                     String nameNode, int numDir) throws IOException {
+    return new MiniMrShim(conf, numberOfTaskTrackers, nameNode, numDir);
+  }
+
+  /**
+   * Shim for MiniMrCluster
+   */
+  public class MiniMrShim implements HadoopShims.MiniMrShim {
+
+    private final MiniMRCluster mr;
+
+    public MiniMrShim(Configuration conf, int numberOfTaskTrackers,
+        String nameNode, int numDir) throws IOException {
+      this.mr = new MiniMRCluster(numberOfTaskTrackers, nameNode, numDir);
+    }
+
+    @Override
+    public int getJobTrackerPort() throws UnsupportedOperationException {
+      return mr.getJobTrackerPort();
+    }
+
+    @Override
+    public void shutdown() throws IOException {
+      mr.shutdown();
+    }
+
+    @Override
+    public void setupConfiguration(Configuration conf) {
+      setJobLauncherRpcAddress(conf, "localhost:" + mr.getJobTrackerPort());
+    }
+  }
+
   public HadoopShims.MiniDFSShim getMiniDfs(Configuration conf,
       int numDataNodes,
       boolean format,

Modified: hive/trunk/shims/src/0.20S/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java
URL: http://svn.apache.org/viewvc/hive/trunk/shims/src/0.20S/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java?rev=1459072&r1=1459071&r2=1459072&view=diff
==============================================================================
--- hive/trunk/shims/src/0.20S/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java (original)
+++ hive/trunk/shims/src/0.20S/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java Wed Mar 20 21:52:51 2013
@@ -26,6 +26,8 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.Trash;
 import org.apache.hadoop.hive.shims.HadoopShimsSecure;
+import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.hadoop.mapred.MiniMRCluster;
 import org.apache.hadoop.mapred.ClusterStatus;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.TaskLogServlet;
@@ -119,4 +121,71 @@ public class Hadoop20SShims extends Hado
   public short getDefaultReplication(FileSystem fs, Path path) {
     return fs.getDefaultReplication();
   }
+
+  /**
+   * Returns a shim to wrap MiniMrCluster
+   */
+  public MiniMrShim getMiniMrCluster(Configuration conf, int numberOfTaskTrackers, 
+                                     String nameNode, int numDir) throws IOException {
+    return new MiniMrShim(conf, numberOfTaskTrackers, nameNode, numDir);
+  }
+
+  /**
+   * Shim for MiniMrCluster
+   */
+  public class MiniMrShim implements HadoopShims.MiniMrShim {
+
+    private final MiniMRCluster mr;
+
+    public MiniMrShim(Configuration conf, int numberOfTaskTrackers,
+        String nameNode, int numDir) throws IOException {
+      this.mr = new MiniMRCluster(numberOfTaskTrackers, nameNode, numDir);
+    }
+
+    @Override
+    public int getJobTrackerPort() throws UnsupportedOperationException {
+      return mr.getJobTrackerPort();
+    }
+
+    @Override
+    public void shutdown() throws IOException {
+      mr.shutdown();
+    }
+
+    @Override
+    public void setupConfiguration(Configuration conf) {
+      setJobLauncherRpcAddress(conf, "localhost:" + mr.getJobTrackerPort());
+    }
+  }
+
+  // Don't move this code to the parent class. There's a binary
+  // incompatibility between hadoop 1 and 2 wrt MiniDFSCluster and we
+  // need to have two different shim classes even though they are
+  // exactly the same.
+  public HadoopShims.MiniDFSShim getMiniDfs(Configuration conf,
+      int numDataNodes,
+      boolean format,
+      String[] racks) throws IOException {
+    return new MiniDFSShim(new MiniDFSCluster(conf, numDataNodes, format, racks));
+  }
+
+  /**
+   * MiniDFSShim.
+   *
+   */
+  public class MiniDFSShim implements HadoopShims.MiniDFSShim {
+    private final MiniDFSCluster cluster;
+
+    public MiniDFSShim(MiniDFSCluster cluster) {
+      this.cluster = cluster;
+    }
+
+    public FileSystem getFileSystem() throws IOException {
+      return cluster.getFileSystem();
+    }
+
+    public void shutdown() {
+      cluster.shutdown();
+    }
+  }
 }
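
The comment above deserves emphasis: the MiniDFSShim added here is
character-for-character identical to the copy added to Hadoop23Shims below,
yet it cannot live once in HadoopShimsSecure (where this commit deletes it).
MiniDFSCluster changed in binary-incompatible ways between the two Hadoop
lines (for example, getFileSystem() is declared to return
DistributedFileSystem rather than FileSystem in Hadoop 2), so a single
compiled copy would fail to link, typically with a NoSuchMethodError,
against whichever Hadoop it was not built against. Compiling the same source
once per shim sidesteps that.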

Modified: hive/trunk/shims/src/0.23/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java
URL: http://svn.apache.org/viewvc/hive/trunk/shims/src/0.23/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java?rev=1459072&r1=1459071&r2=1459072&view=diff
==============================================================================
--- hive/trunk/shims/src/0.23/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java (original)
+++ hive/trunk/shims/src/0.23/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java Wed Mar 20 21:52:51 2013
@@ -21,13 +21,17 @@ import java.io.IOException;
 import java.lang.Integer;
 import java.net.MalformedURLException;
 import java.net.URL;
+import java.util.Map;
 
+import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.Trash;
 import org.apache.hadoop.hive.shims.HadoopShims.JobTrackerState;
 import org.apache.hadoop.hive.shims.HadoopShimsSecure;
+import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.hadoop.mapred.MiniMRCluster;
 import org.apache.hadoop.mapred.ClusterStatus;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapreduce.Job;
@@ -132,4 +136,91 @@ public class Hadoop23Shims extends Hadoo
           throws IOException {
     return Trash.moveToAppropriateTrash(fs, path, conf);
   }
+
+  /**
+   * Returns a shim to wrap MiniMrCluster
+   */
+  public MiniMrShim getMiniMrCluster(Configuration conf, int numberOfTaskTrackers, 
+                                     String nameNode, int numDir) throws IOException {
+    return new MiniMrShim(conf, numberOfTaskTrackers, nameNode, numDir);
+  }
+
+  /**
+   * Shim for MiniMrCluster
+   */
+  public class MiniMrShim implements HadoopShims.MiniMrShim {
+
+    private final MiniMRCluster mr;
+    private final Configuration conf;
+
+    public MiniMrShim(Configuration conf, int numberOfTaskTrackers, 
+                      String nameNode, int numDir) throws IOException {
+      this.conf = conf;
+
+      JobConf jConf = new JobConf(conf);
+      jConf.set("yarn.scheduler.capacity.root.queues", "default");
+      jConf.set("yarn.scheduler.capacity.root.default.capacity", "100");
+
+      mr = new MiniMRCluster(numberOfTaskTrackers, nameNode, numDir, null, null, jConf);
+    }
+
+    @Override
+    public int getJobTrackerPort() throws UnsupportedOperationException {
+      String address = conf.get("yarn.resourcemanager.address");
+      address = StringUtils.substringAfterLast(address, ":");
+
+      if (StringUtils.isBlank(address)) {
+        throw new IllegalArgumentException("Invalid YARN resource manager port.");
+      }
+      
+      return Integer.parseInt(address);
+    }
+
+    @Override
+    public void shutdown() throws IOException {
+      mr.shutdown();
+    }
+    
+    @Override
+    public void setupConfiguration(Configuration conf) {
+      JobConf jConf = mr.createJobConf();
+      for (Map.Entry<String, String> pair: jConf) {
+        conf.set(pair.getKey(), pair.getValue());
+      }
+    }
+  }
+  
+  // Don't move this code to the parent class. There's a binary
+  // incompatibility between hadoop 1 and 2 wrt MiniDFSCluster and we
+  // need to have two different shim classes even though they are
+  // exactly the same.
+  public HadoopShims.MiniDFSShim getMiniDfs(Configuration conf,
+      int numDataNodes,
+      boolean format,
+      String[] racks) throws IOException {
+    return new MiniDFSShim(new MiniDFSCluster(conf, numDataNodes, format, racks));
+  }
+
+  /**
+   * MiniDFSShim.
+   *
+   */
+  public class MiniDFSShim implements HadoopShims.MiniDFSShim {
+    private final MiniDFSCluster cluster;
+
+    public MiniDFSShim(MiniDFSCluster cluster) {
+      this.cluster = cluster;
+    }
+
+    public FileSystem getFileSystem() throws IOException {
+      return cluster.getFileSystem();
+    }
+
+    public void shutdown() {
+      cluster.shutdown();
+    }
+  }
 }
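
Two details of the 0.23 shim are worth noting. The capacity-scheduler keys
set in the constructor presumably give the ResourceManager a schedulable
root.default queue, without which the embedded YARN cluster would accept no
jobs. And setupConfiguration copies the entire generated JobConf back into
the caller's conf because the YARN mini cluster binds its ResourceManager,
NodeManager and history-server endpoints to ephemeral ports chosen at
startup; those addresses exist only in the generated configuration and
cannot be derived from a single known port the way an MR1 JobTracker
address could.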

Modified: hive/trunk/shims/src/common-secure/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java
URL: http://svn.apache.org/viewvc/hive/trunk/shims/src/common-secure/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java?rev=1459072&r1=1459071&r2=1459072&view=diff
==============================================================================
--- hive/trunk/shims/src/common-secure/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java (original)
+++ hive/trunk/shims/src/common-secure/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java Wed Mar 20 21:52:51 2013
@@ -37,7 +37,6 @@ import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.PathFilter;
-import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.hive.io.HiveIOExceptionHandlerUtil;
 import org.apache.hadoop.hive.thrift.DelegationTokenIdentifier;
 import org.apache.hadoop.hive.thrift.DelegationTokenSelector;
@@ -106,33 +105,6 @@ public abstract class HadoopShimsSecure 
     // gone in 20+
   }
 
-  public HadoopShims.MiniDFSShim getMiniDfs(Configuration conf,
-      int numDataNodes,
-      boolean format,
-      String[] racks) throws IOException {
-    return new MiniDFSShim(new MiniDFSCluster(conf, numDataNodes, format, racks));
-  }
-
-  /**
-   * MiniDFSShim.
-   *
-   */
-  public class MiniDFSShim implements HadoopShims.MiniDFSShim {
-    private final MiniDFSCluster cluster;
-
-    public MiniDFSShim(MiniDFSCluster cluster) {
-      this.cluster = cluster;
-    }
-
-    public FileSystem getFileSystem() throws IOException {
-      return cluster.getFileSystem();
-    }
-
-    public void shutdown() {
-      cluster.shutdown();
-    }
-  }
-
   /**
    * We define this function here to make the code compatible between
    * hadoop 0.17 and hadoop 0.20.

Modified: hive/trunk/shims/src/common/java/org/apache/hadoop/hive/shims/HadoopShims.java
URL: http://svn.apache.org/viewvc/hive/trunk/shims/src/common/java/org/apache/hadoop/hive/shims/HadoopShims.java?rev=1459072&r1=1459071&r2=1459072&view=diff
==============================================================================
--- hive/trunk/shims/src/common/java/org/apache/hadoop/hive/shims/HadoopShims.java (original)
+++ hive/trunk/shims/src/common/java/org/apache/hadoop/hive/shims/HadoopShims.java Wed Mar 20 21:52:51 2013
@@ -113,6 +113,21 @@ public interface HadoopShims {
   long getAccessTime(FileStatus file);
 
   /**
+   * Returns a shim to wrap MiniMrCluster
+   */
+  public MiniMrShim getMiniMrCluster(Configuration conf, int numberOfTaskTrackers,
+                                     String nameNode, int numDir) throws IOException;
+
+  /**
+   * Shim for MiniMrCluster
+   */
+  public interface MiniMrShim {
+    public int getJobTrackerPort() throws UnsupportedOperationException;
+    public void shutdown() throws IOException;
+    public void setupConfiguration(Configuration conf);
+  }
+
+  /**
    * Returns a shim to wrap MiniDFSCluster. This is necessary since this class
    * was moved from org.apache.hadoop.dfs to org.apache.hadoop.hdfs
    */
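
Finally, the MiniMrShim contract introduced here is small enough that
alternative backends are easy to slot in. As an illustration only (this
class is not part of the patch), a local-mode stand-in that satisfies the
interface without starting any cluster might look like:

    import java.io.IOException;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hive.shims.HadoopShims;

    // Hypothetical stand-in: runs jobs with the classic local job runner
    // instead of a mini cluster; useful only to illustrate the contract.
    public class LocalMiniMrShim implements HadoopShims.MiniMrShim {
      @Override
      public int getJobTrackerPort() throws UnsupportedOperationException {
        throw new UnsupportedOperationException("local runner has no JobTracker port");
      }

      @Override
      public void shutdown() throws IOException {
        // nothing to tear down
      }

      @Override
      public void setupConfiguration(Configuration conf) {
        conf.set("mapred.job.tracker", "local"); // MR1 local-mode switch
      }
    }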