Posted to commits@hive.apache.org by th...@apache.org on 2013/08/30 09:28:23 UTC

svn commit: r1518897 - in /hive/trunk: hcatalog/ hcatalog/build-support/ant/ hcatalog/core/ hcatalog/core/src/main/java/org/apache/hadoop/mapred/ hcatalog/core/src/main/java/org/apache/hcatalog/data/transfer/impl/ hcatalog/core/src/main/java/org/apache...

Author: thejas
Date: Fri Aug 30 07:28:23 2013
New Revision: 1518897

URL: http://svn.apache.org/r1518897
Log:
HIVE-4460 : Publish HCatalog artifacts for Hadoop 2.x  (Eugene Koifman via Thejas Nair)

Modified:
    hive/trunk/hcatalog/build-support/ant/deploy.xml
    hive/trunk/hcatalog/build.properties
    hive/trunk/hcatalog/build.xml
    hive/trunk/hcatalog/core/build.xml
    hive/trunk/hcatalog/core/src/main/java/org/apache/hadoop/mapred/HCatMapRedUtil.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/data/transfer/impl/HCatInputFormatReader.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/data/transfer/impl/HCatOutputFormatWriter.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/mapreduce/FileOutputCommitterContainer.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/mapreduce/MultiOutputFormat.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/mapreduce/Security.java
    hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/rcfile/TestRCFileMapReduceInputFormat.java
    hive/trunk/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hcatalog/pig/HCatStorer.java
    hive/trunk/hcatalog/webhcat/svr/pom.xml
    hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/DeleteDelegator.java
    hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/ListDelegator.java
    hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/StatusDelegator.java
    hive/trunk/shims/src/0.20/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java
    hive/trunk/shims/src/0.20S/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java
    hive/trunk/shims/src/0.23/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java
    hive/trunk/shims/src/common/java/org/apache/hadoop/hive/shims/HadoopShims.java

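The common thread in the changes below is that HCatalog's private shim lookup (org.apache.hcatalog.shims.HCatHadoopShims.Instance.get()) is replaced by Hive's own ShimLoader, so a single HCatalog artifact can be built and published against either Hadoop 1.x or 2.x. As a minimal sketch of the new call pattern only (the class name ShimUsageSketch and the printed output are illustrative, not part of this patch):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.shims.HadoopShims.HCatHadoopShims;
import org.apache.hadoop.hive.shims.ShimLoader;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.TaskAttemptContext;

public class ShimUsageSketch {
    public static void main(String[] args) {
        Configuration conf = new Configuration();

        // Old pattern, removed by this commit:
        //   HCatHadoopShims.Instance.get().createJobContext(conf, null);

        // New pattern: resolve the Hadoop-version-specific shim through Hive's ShimLoader.
        HCatHadoopShims shim = ShimLoader.getHadoopShims().getHCatShim();
        TaskAttemptContext taskContext =
            shim.createTaskAttemptContext(conf, shim.createTaskAttemptID());
        JobContext jobContext = shim.createJobContext(conf, null);  // null JobID, as in HCatInputFormatReader
        System.out.println("task attempt: " + taskContext.getTaskAttemptID());
    }
}

The concrete shim returned depends on which Hadoop version is on the classpath (Hadoop20SShims or Hadoop23Shims in this patch), which is what lets the per-version HCatalog shims module be deleted.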
Modified: hive/trunk/hcatalog/build-support/ant/deploy.xml
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/build-support/ant/deploy.xml?rev=1518897&r1=1518896&r2=1518897&view=diff
==============================================================================
--- hive/trunk/hcatalog/build-support/ant/deploy.xml (original)
+++ hive/trunk/hcatalog/build-support/ant/deploy.xml Fri Aug 30 07:28:23 2013
@@ -69,7 +69,7 @@
       <_mvnpublish module="testutils" />
     </target>
 
-    <target name="mvn-init" unless="mvn-init.complete" description="Get Maven Ant Tasts jar and deploy all Hive jars to local Maven repo">
+    <target name="mvn-init" unless="mvn-init.complete" description="Get Maven Ant Tasks jar and deploy all Hive jars to local Maven repo">
         <echo message="${ant.project.name}"/>
         <get src="${mvnrepo}/org/apache/maven/maven-ant-tasks/${maven-ant-tasks.version}/maven-ant-tasks-${maven-ant-tasks.version}.jar"
              dest="${path.to.basedir}/build/maven-ant-tasks-${maven-ant-tasks.version}.jar"

Modified: hive/trunk/hcatalog/build.properties
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/build.properties?rev=1518897&r1=1518896&r2=1518897&view=diff
==============================================================================
--- hive/trunk/hcatalog/build.properties (original)
+++ hive/trunk/hcatalog/build.properties Fri Aug 30 07:28:23 2013
@@ -66,12 +66,6 @@ javac.version=1.6
 javac.args=
 javac.args.warnings=
 
-# hive properties
-#shims.name=20
-shims.20S.hive.shims.include=0.20,0.20S
-shims.20S.hadoop.version=${hive.hadoop-0.20S.version}
-shims.23.hive.shims.include=0.23
-shims.23.hadoop.version=${hive.hadoop-0.23.version}
 
 ###############################################################################
 # deploy properties

Modified: hive/trunk/hcatalog/build.xml
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/build.xml?rev=1518897&r1=1518896&r2=1518897&view=diff
==============================================================================
--- hive/trunk/hcatalog/build.xml (original)
+++ hive/trunk/hcatalog/build.xml Fri Aug 30 07:28:23 2013
@@ -186,7 +186,6 @@
         <ant target="clean" dir="webhcat/svr" inheritAll="false"/>
         <ant target="clean" dir="webhcat/java-client" inheritAll="false"/>
         <ant target="clean" dir="storage-handlers/hbase" inheritAll="false"/>
-        <ant target="clean" dir="shims" inheritAll="false"/>
     </target>
 
     <!-- Clean up children -->
@@ -200,7 +199,6 @@
         <ant target="clean-test" dir="webhcat/svr" inheritAll="false"/>
         <ant target="clean-test" dir="webhcat/java-client" inheritAll="false"/>
         <ant target="clean-test" dir="storage-handlers/hbase" inheritAll="false"/>
-        <ant target="clean-test" dir="shims" inheritAll="false"/>
     </target>
 
     <!--
@@ -480,7 +478,6 @@
                 <include name="server-extensions/**"/>
                 <include name="webhcat/**"/>
                 <include name="license/**"/>
-                <include name="shims/**"/>
                 <include name="src/**"/>
                 <include name="storage-handlers/**"/>
                 <include name="*.properties"/>

Modified: hive/trunk/hcatalog/core/build.xml
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/build.xml?rev=1518897&r1=1518896&r2=1518897&view=diff
==============================================================================
--- hive/trunk/hcatalog/core/build.xml (original)
+++ hive/trunk/hcatalog/core/build.xml Fri Aug 30 07:28:23 2013
@@ -39,15 +39,4 @@
   <path id="findbugs.class.path">
     <fileset dir="${build.dir}/lib/compile"/>
   </path>
-
-  <target name="compile">
-    <echo message="${ant.project.name}"/>
-    <_javac srcDir="${basedir}/src/main/java"
-            destDir="${build.classes}"
-            classPathRef="compile.class.path"/>
-    <ant target="jar" dir="${path.to.basedir}/shims" inheritAll="false">
-        <property name="_mvn.hadoop.profile" value="${mvn.hadoop.profile}"/>
-    </ant>
-  </target>
-
 </project>

Modified: hive/trunk/hcatalog/core/src/main/java/org/apache/hadoop/mapred/HCatMapRedUtil.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/src/main/java/org/apache/hadoop/mapred/HCatMapRedUtil.java?rev=1518897&r1=1518896&r2=1518897&view=diff
==============================================================================
--- hive/trunk/hcatalog/core/src/main/java/org/apache/hadoop/mapred/HCatMapRedUtil.java (original)
+++ hive/trunk/hcatalog/core/src/main/java/org/apache/hadoop/mapred/HCatMapRedUtil.java Fri Aug 30 07:28:23 2013
@@ -19,9 +19,9 @@
 
 package org.apache.hadoop.mapred;
 
+import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.util.Progressable;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hcatalog.shims.HCatHadoopShims;
 
 public class HCatMapRedUtil {
 
@@ -32,11 +32,11 @@ public class HCatMapRedUtil {
     }
 
     public static org.apache.hadoop.mapreduce.TaskAttemptContext createTaskAttemptContext(Configuration conf, org.apache.hadoop.mapreduce.TaskAttemptID id) {
-        return  HCatHadoopShims.Instance.get().createTaskAttemptContext(conf,id);
+        return ShimLoader.getHadoopShims().getHCatShim().createTaskAttemptContext(conf,id);
     }
 
     public static TaskAttemptContext createTaskAttemptContext(JobConf conf, TaskAttemptID id, Progressable progressable) {
-        return HCatHadoopShims.Instance.get ().createTaskAttemptContext(conf, id, (Reporter) progressable);
+        return ShimLoader.getHadoopShims().getHCatShim().createTaskAttemptContext(conf, id, (Reporter) progressable);
     }
 
     public static org.apache.hadoop.mapred.JobContext createJobContext(org.apache.hadoop.mapreduce.JobContext context) {
@@ -46,6 +46,6 @@ public class HCatMapRedUtil {
     }
 
     public static JobContext createJobContext(JobConf conf, org.apache.hadoop.mapreduce.JobID id, Progressable progressable) {
-        return HCatHadoopShims.Instance.get ().createJobContext(conf, id, (Reporter) progressable);
+        return ShimLoader.getHadoopShims().getHCatShim().createJobContext(conf, id, (Reporter) progressable);
     }
 }

Modified: hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/data/transfer/impl/HCatInputFormatReader.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/data/transfer/impl/HCatInputFormatReader.java?rev=1518897&r1=1518896&r2=1518897&view=diff
==============================================================================
--- hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/data/transfer/impl/HCatInputFormatReader.java (original)
+++ hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/data/transfer/impl/HCatInputFormatReader.java Fri Aug 30 07:28:23 2013
@@ -24,6 +24,7 @@ import java.util.Iterator;
 import java.util.Map;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.io.WritableComparable;
 import org.apache.hadoop.mapreduce.InputSplit;
 import org.apache.hadoop.mapreduce.Job;
@@ -38,7 +39,6 @@ import org.apache.hcatalog.data.transfer
 import org.apache.hcatalog.data.transfer.ReaderContext;
 import org.apache.hcatalog.data.transfer.state.StateProvider;
 import org.apache.hcatalog.mapreduce.HCatInputFormat;
-import org.apache.hcatalog.shims.HCatHadoopShims;
 
 /**
  * This reader reads via {@link HCatInputFormat}
@@ -66,7 +66,7 @@ public class HCatInputFormatReader exten
                 job, re.getDbName(), re.getTableName()).setFilter(re.getFilterString());
             ReaderContext cntxt = new ReaderContext();
             cntxt.setInputSplits(hcif.getSplits(
-                HCatHadoopShims.Instance.get().createJobContext(job.getConfiguration(), null)));
+                    ShimLoader.getHadoopShims().getHCatShim().createJobContext(job.getConfiguration(), null)));
             cntxt.setConf(job.getConfiguration());
             return cntxt;
         } catch (IOException e) {
@@ -82,7 +82,7 @@ public class HCatInputFormatReader exten
         HCatInputFormat inpFmt = new HCatInputFormat();
         RecordReader<WritableComparable, HCatRecord> rr;
         try {
-            TaskAttemptContext cntxt = HCatHadoopShims.Instance.get().createTaskAttemptContext(conf, new TaskAttemptID());
+            TaskAttemptContext cntxt = ShimLoader.getHadoopShims().getHCatShim().createTaskAttemptContext(conf, new TaskAttemptID());
             rr = inpFmt.createRecordReader(split, cntxt);
             rr.initialize(split, cntxt);
         } catch (IOException e) {

Modified: hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/data/transfer/impl/HCatOutputFormatWriter.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/data/transfer/impl/HCatOutputFormatWriter.java?rev=1518897&r1=1518896&r2=1518897&view=diff
==============================================================================
--- hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/data/transfer/impl/HCatOutputFormatWriter.java (original)
+++ hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/data/transfer/impl/HCatOutputFormatWriter.java Fri Aug 30 07:28:23 2013
@@ -24,6 +24,7 @@ import java.util.Iterator;
 import java.util.Map;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.io.WritableComparable;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.JobStatus.State;
@@ -40,7 +41,6 @@ import org.apache.hcatalog.data.transfer
 import org.apache.hcatalog.data.transfer.state.StateProvider;
 import org.apache.hcatalog.mapreduce.HCatOutputFormat;
 import org.apache.hcatalog.mapreduce.OutputJobInfo;
-import org.apache.hcatalog.shims.HCatHadoopShims;
 
 /**
  * This writer writes via {@link HCatOutputFormat}
@@ -67,8 +67,8 @@ public class HCatOutputFormatWriter exte
             HCatOutputFormat.setSchema(job, HCatOutputFormat.getTableSchema(job));
             HCatOutputFormat outFormat = new HCatOutputFormat();
             outFormat.checkOutputSpecs(job);
-            outFormat.getOutputCommitter(HCatHadoopShims.Instance.get().createTaskAttemptContext
-                (job.getConfiguration(), HCatHadoopShims.Instance.get().createTaskAttemptID())).setupJob(job);
+            outFormat.getOutputCommitter(ShimLoader.getHadoopShims().getHCatShim().createTaskAttemptContext(
+                    job.getConfiguration(), ShimLoader.getHadoopShims().getHCatShim().createTaskAttemptID())).setupJob(job);
         } catch (IOException e) {
             throw new HCatException(ErrorType.ERROR_NOT_INITIALIZED, e);
         } catch (InterruptedException e) {
@@ -85,8 +85,8 @@ public class HCatOutputFormatWriter exte
         int id = sp.getId();
         setVarsInConf(id);
         HCatOutputFormat outFormat = new HCatOutputFormat();
-        TaskAttemptContext cntxt = HCatHadoopShims.Instance.get().createTaskAttemptContext
-            (conf, new TaskAttemptID(HCatHadoopShims.Instance.get().createTaskID(), id));
+        TaskAttemptContext cntxt = ShimLoader.getHadoopShims().getHCatShim().createTaskAttemptContext(
+                conf, new TaskAttemptID(ShimLoader.getHadoopShims().getHCatShim().createTaskID(), id));
         OutputCommitter committer = null;
         RecordWriter<WritableComparable<?>, HCatRecord> writer;
         try {
@@ -125,9 +125,9 @@ public class HCatOutputFormatWriter exte
     @Override
     public void commit(WriterContext context) throws HCatException {
         try {
-            new HCatOutputFormat().getOutputCommitter(HCatHadoopShims.Instance.get().createTaskAttemptContext
-                (context.getConf(), HCatHadoopShims.Instance.get().createTaskAttemptID()))
-                .commitJob(HCatHadoopShims.Instance.get().createJobContext(context.getConf(), null));
+            new HCatOutputFormat().getOutputCommitter(ShimLoader.getHadoopShims().getHCatShim().createTaskAttemptContext(
+                    context.getConf(), ShimLoader.getHadoopShims().getHCatShim().createTaskAttemptID()))
+                .commitJob(ShimLoader.getHadoopShims().getHCatShim().createJobContext(context.getConf(), null));
         } catch (IOException e) {
             throw new HCatException(ErrorType.ERROR_NOT_INITIALIZED, e);
         } catch (InterruptedException e) {
@@ -138,9 +138,10 @@ public class HCatOutputFormatWriter exte
     @Override
     public void abort(WriterContext context) throws HCatException {
         try {
-            new HCatOutputFormat().getOutputCommitter(HCatHadoopShims.Instance.get().createTaskAttemptContext
-                (context.getConf(), HCatHadoopShims.Instance.get().createTaskAttemptID()))
-                .abortJob(HCatHadoopShims.Instance.get().createJobContext(context.getConf(), null), State.FAILED);
+            new HCatOutputFormat().getOutputCommitter(ShimLoader.getHadoopShims().getHCatShim().createTaskAttemptContext(
+                context.getConf(), ShimLoader.getHadoopShims().getHCatShim().createTaskAttemptID()))
+                .abortJob(ShimLoader.getHadoopShims().getHCatShim().createJobContext(
+                        context.getConf(), null), State.FAILED);
         } catch (IOException e) {
             throw new HCatException(ErrorType.ERROR_NOT_INITIALIZED, e);
         } catch (InterruptedException e) {

Modified: hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/mapreduce/FileOutputCommitterContainer.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/mapreduce/FileOutputCommitterContainer.java?rev=1518897&r1=1518896&r2=1518897&view=diff
==============================================================================
--- hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/mapreduce/FileOutputCommitterContainer.java (original)
+++ hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/mapreduce/FileOutputCommitterContainer.java Fri Aug 30 07:28:23 2013
@@ -43,6 +43,7 @@ import org.apache.hadoop.hive.metastore.
 import org.apache.hadoop.hive.metastore.api.Partition;
 import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
 import org.apache.hadoop.hive.ql.metadata.Table;
+import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.mapred.HCatMapRedUtil;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapreduce.JobContext;
@@ -56,7 +57,6 @@ import org.apache.hcatalog.data.schema.H
 import org.apache.hcatalog.data.schema.HCatSchema;
 import org.apache.hcatalog.data.schema.HCatSchemaUtils;
 import org.apache.hcatalog.har.HarOutputCommitterPostProcessor;
-import org.apache.hcatalog.shims.HCatHadoopShims;
 import org.apache.thrift.TException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -304,7 +304,7 @@ class FileOutputCommitterContainer exten
 
         // Apply the group and permissions to the leaf partition and files.
         // Need not bother in case of HDFS as permission is taken care of by setting UMask
-        if (!HCatHadoopShims.Instance.get().isFileInHDFS(fs, partPath)) {
+        if (!ShimLoader.getHadoopShims().getHCatShim().isFileInHDFS(fs, partPath)) {
             applyGroupAndPerms(fs, partPath, perms, grpName, true);
         }
 
@@ -578,7 +578,7 @@ class FileOutputCommitterContainer exten
                         jobConf,
                         context.getJobID(),
                         InternalUtil.createReporter(HCatMapRedUtil.createTaskAttemptContext(jobConf,
-                            HCatHadoopShims.Instance.get().createTaskAttemptID())));
+                            ShimLoader.getHadoopShims().getHCatShim().createTaskAttemptID())));
                     HCatOutputFormat.configureOutputStorageHandler(currContext, jobInfo, fullPartSpec);
                     contextDiscoveredByPath.put(st.getPath().toString(), currContext);
                 }

Modified: hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/mapreduce/MultiOutputFormat.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/mapreduce/MultiOutputFormat.java?rev=1518897&r1=1518896&r2=1518897&view=diff
==============================================================================
--- hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/mapreduce/MultiOutputFormat.java (original)
+++ hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/mapreduce/MultiOutputFormat.java Fri Aug 30 07:28:23 2013
@@ -36,6 +36,8 @@ import org.apache.commons.lang.StringUti
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.shims.HadoopShims;
+import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.mapreduce.Job;
@@ -48,7 +50,6 @@ import org.apache.hadoop.mapreduce.TaskA
 import org.apache.hadoop.mapreduce.TaskInputOutputContext;
 import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.hcatalog.common.HCatUtil;
-import org.apache.hcatalog.shims.HCatHadoopShims;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -146,13 +147,16 @@ public class MultiOutputFormat extends O
 
     static {
         configsToOverride.add("mapred.output.dir");
-        configsToOverride.add(HCatHadoopShims.Instance.get().getPropertyName(HCatHadoopShims.PropertyName.CACHE_SYMLINK));
+        configsToOverride.add(ShimLoader.getHadoopShims().getHCatShim().getPropertyName(
+                HadoopShims.HCatHadoopShims.PropertyName.CACHE_SYMLINK));
         configsToMerge.put(JobContext.JOB_NAMENODES, COMMA_DELIM);
         configsToMerge.put("tmpfiles", COMMA_DELIM);
         configsToMerge.put("tmpjars", COMMA_DELIM);
         configsToMerge.put("tmparchives", COMMA_DELIM);
-        configsToMerge.put(HCatHadoopShims.Instance.get().getPropertyName(HCatHadoopShims.PropertyName.CACHE_ARCHIVES), COMMA_DELIM);
-        configsToMerge.put(HCatHadoopShims.Instance.get().getPropertyName(HCatHadoopShims.PropertyName.CACHE_FILES), COMMA_DELIM);
+        configsToMerge.put(ShimLoader.getHadoopShims().getHCatShim().getPropertyName(
+                HadoopShims.HCatHadoopShims.PropertyName.CACHE_ARCHIVES), COMMA_DELIM);
+        configsToMerge.put(ShimLoader.getHadoopShims().getHCatShim().getPropertyName(
+                HadoopShims.HCatHadoopShims.PropertyName.CACHE_FILES), COMMA_DELIM);
         String fileSep;
         if (HCatUtil.isHadoop23()) {
             fileSep = ",";
@@ -183,7 +187,8 @@ public class MultiOutputFormat extends O
      */
     public static JobContext getJobContext(String alias, JobContext context) {
         String aliasConf = context.getConfiguration().get(getAliasConfName(alias));
-        JobContext aliasContext = HCatHadoopShims.Instance.get().createJobContext(context.getConfiguration(), context.getJobID());
+        JobContext aliasContext = ShimLoader.getHadoopShims().getHCatShim().createJobContext(
+                context.getConfiguration(), context.getJobID());
         addToConfig(aliasConf, aliasContext.getConfiguration());
         return aliasContext;
     }
@@ -197,7 +202,7 @@ public class MultiOutputFormat extends O
      */
     public static TaskAttemptContext getTaskAttemptContext(String alias, TaskAttemptContext context) {
         String aliasConf = context.getConfiguration().get(getAliasConfName(alias));
-        TaskAttemptContext aliasContext = HCatHadoopShims.Instance.get().createTaskAttemptContext(
+        TaskAttemptContext aliasContext = ShimLoader.getHadoopShims().getHCatShim().createTaskAttemptContext(
                 context.getConfiguration(), context.getTaskAttemptID());
         addToConfig(aliasConf, aliasContext.getConfiguration());
         return aliasContext;

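The static initializer above also relies on the second shim facility added here: getPropertyName(...) maps a symbolic property to whichever configuration key the running Hadoop version uses (DistributedCache constants on 1.x, MRJobConfig constants on 2.x). A small illustrative sketch, assuming only the shim APIs introduced by this patch (the class name and example path are hypothetical):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.shims.HadoopShims;
import org.apache.hadoop.hive.shims.ShimLoader;

public class CacheKeySketch {
    public static void main(String[] args) {
        HadoopShims.HCatHadoopShims shim = ShimLoader.getHadoopShims().getHCatShim();

        // Resolve the version-specific key for the distributed-cache file list;
        // the exact key string differs between Hadoop 1.x and 2.x.
        String cacheFilesKey =
            shim.getPropertyName(HadoopShims.HCatHadoopShims.PropertyName.CACHE_FILES);

        Configuration conf = new Configuration();
        conf.set(cacheFilesKey, "hdfs:///tmp/example.jar");  // illustrative value
        System.out.println(cacheFilesKey + " = " + conf.get(cacheFilesKey));
    }
}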
Modified: hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/mapreduce/Security.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/mapreduce/Security.java?rev=1518897&r1=1518896&r2=1518897&view=diff
==============================================================================
--- hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/mapreduce/Security.java (original)
+++ hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/mapreduce/Security.java Fri Aug 30 07:28:23 2013
@@ -28,6 +28,7 @@ import java.util.Map.Entry;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
 import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.hive.thrift.DelegationTokenSelector;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapreduce.Job;
@@ -39,7 +40,6 @@ import org.apache.hadoop.security.token.
 import org.apache.hadoop.security.token.TokenSelector;
 import org.apache.hcatalog.common.HCatConstants;
 import org.apache.hcatalog.common.HCatUtil;
-import org.apache.hcatalog.shims.HCatHadoopShims;
 import org.apache.thrift.TException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -142,7 +142,7 @@ final class Security {
                     TokenSelector<? extends TokenIdentifier> jtTokenSelector =
                         new org.apache.hadoop.mapreduce.security.token.delegation.DelegationTokenSelector();
                     Token jtToken = jtTokenSelector.selectToken(org.apache.hadoop.security.SecurityUtil.buildTokenService(
-                        HCatHadoopShims.Instance.get().getResourceManagerAddress(conf)), ugi.getTokens());
+                        ShimLoader.getHadoopShims().getHCatShim().getResourceManagerAddress(conf)), ugi.getTokens());
                     if (jtToken == null) {
                         //we don't need to cancel this token as the TokenRenewer for JT tokens
                         //takes care of cancelling them

Modified: hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/rcfile/TestRCFileMapReduceInputFormat.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/rcfile/TestRCFileMapReduceInputFormat.java?rev=1518897&r1=1518896&r2=1518897&view=diff
==============================================================================
--- hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/rcfile/TestRCFileMapReduceInputFormat.java (original)
+++ hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/rcfile/TestRCFileMapReduceInputFormat.java Fri Aug 30 07:28:23 2013
@@ -35,6 +35,7 @@ import org.apache.hadoop.hive.serde.serd
 import org.apache.hadoop.hive.serde2.columnar.BytesRefArrayWritable;
 import org.apache.hadoop.hive.serde2.columnar.BytesRefWritable;
 import org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe;
+import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.compress.CompressionCodec;
 import org.apache.hadoop.mapreduce.InputSplit;
@@ -43,7 +44,6 @@ import org.apache.hadoop.mapreduce.JobCo
 import org.apache.hadoop.mapreduce.RecordReader;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
 import org.apache.hadoop.mapreduce.TaskAttemptID;
-import org.apache.hcatalog.shims.HCatHadoopShims;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -233,7 +233,8 @@ public class TestRCFileMapReduceInputFor
         assertEquals("splits length should be " + splitNumber, splits.size(), splitNumber);
         int readCount = 0;
         for (int i = 0; i < splits.size(); i++) {
-            TaskAttemptContext tac = HCatHadoopShims.Instance.get().createTaskAttemptContext(jonconf, new TaskAttemptID());
+            TaskAttemptContext tac = ShimLoader.getHadoopShims().getHCatShim().createTaskAttemptContext(jonconf,
+                    new TaskAttemptID());
             RecordReader<LongWritable, BytesRefArrayWritable> rr = inputFormat.createRecordReader(splits.get(i), tac);
             rr.initialize(splits.get(i), tac);
             while (rr.nextKeyValue()) {

Modified: hive/trunk/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hcatalog/pig/HCatStorer.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hcatalog/pig/HCatStorer.java?rev=1518897&r1=1518896&r2=1518897&view=diff
==============================================================================
--- hive/trunk/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hcatalog/pig/HCatStorer.java (original)
+++ hive/trunk/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hcatalog/pig/HCatStorer.java Fri Aug 30 07:28:23 2013
@@ -27,6 +27,7 @@ import java.util.Map.Entry;
 import java.util.Properties;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.OutputFormat;
 import org.apache.hadoop.security.Credentials;
@@ -36,7 +37,6 @@ import org.apache.hcatalog.common.HCatEx
 import org.apache.hcatalog.data.schema.HCatSchema;
 import org.apache.hcatalog.mapreduce.HCatOutputFormat;
 import org.apache.hcatalog.mapreduce.OutputJobInfo;
-import org.apache.hcatalog.shims.HCatHadoopShims;
 import org.apache.pig.PigException;
 import org.apache.pig.ResourceSchema;
 import org.apache.pig.impl.logicalLayer.FrontendException;
@@ -157,11 +157,11 @@ public class HCatStorer extends HCatBase
 
     @Override
     public void storeSchema(ResourceSchema schema, String arg1, Job job) throws IOException {
-        HCatHadoopShims.Instance.get().commitJob(getOutputFormat(), job);
+        ShimLoader.getHadoopShims().getHCatShim().commitJob(getOutputFormat(), job);
     }
 
     @Override
     public void cleanupOnFailure(String location, Job job) throws IOException {
-        HCatHadoopShims.Instance.get().abortJob(getOutputFormat(), job);
+        ShimLoader.getHadoopShims().getHCatShim().abortJob(getOutputFormat(), job);
     }
 }

Modified: hive/trunk/hcatalog/webhcat/svr/pom.xml
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/webhcat/svr/pom.xml?rev=1518897&r1=1518896&r2=1518897&view=diff
==============================================================================
--- hive/trunk/hcatalog/webhcat/svr/pom.xml (original)
+++ hive/trunk/hcatalog/webhcat/svr/pom.xml Fri Aug 30 07:28:23 2013
@@ -36,7 +36,18 @@
     <url>http://maven.apache.org</url>
 
     <dependencies>
-
+        <!--
+        <dependency>
+            <groupId>xerces</groupId>
+            <artifactId>xercesImpl</artifactId>
+            <version>2.9.1</version>
+        </dependency>
+        <dependency>
+            <groupId>xalan</groupId>
+            <artifactId>xalan</artifactId>
+            <version>2.7.1</version>
+        </dependency>
+        -->
         <!-- provided scope - made available as separate package
           not packaged or added as dependency
         -->
@@ -74,12 +85,6 @@
             <scope>compile</scope>
         </dependency>
         <dependency>
-            <groupId>com.sun.jersey</groupId>
-            <artifactId>jersey-json</artifactId>
-            <version>${jersey.version}</version>
-            <scope>compile</scope>
-        </dependency>
-        <dependency>
             <groupId>org.codehaus.jackson</groupId>
             <artifactId>jackson-core-asl</artifactId>
             <version>${jackson.version}</version>

Modified: hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/DeleteDelegator.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/DeleteDelegator.java?rev=1518897&r1=1518896&r2=1518897&view=diff
==============================================================================
--- hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/DeleteDelegator.java (original)
+++ hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/DeleteDelegator.java Fri Aug 30 07:28:23 2013
@@ -19,8 +19,10 @@
 package org.apache.hcatalog.templeton;
 
 import java.io.IOException;
+
+import org.apache.hadoop.hive.shims.HadoopShims.WebHCatJTShim;
+import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.mapred.JobID;
-import org.apache.hadoop.mapred.TempletonJobTracker;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hcatalog.templeton.tool.JobState;
 
@@ -36,10 +38,10 @@ public class DeleteDelegator extends Tem
         throws NotAuthorizedException, BadParam, IOException, InterruptedException
     {
         UserGroupInformation ugi = UserGroupInformation.createRemoteUser(user);
-        TempletonJobTracker tracker = null;
+        WebHCatJTShim tracker = null;
         JobState state = null;
         try {
-            tracker = new TempletonJobTracker(appConf);
+            tracker = ShimLoader.getHadoopShims().getWebHCatShim(appConf);
             JobID jobid = StatusDelegator.StringToJobID(id);
             if (jobid == null)
                 throw new BadParam("Invalid jobid: " + id);

Modified: hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/ListDelegator.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/ListDelegator.java?rev=1518897&r1=1518896&r2=1518897&view=diff
==============================================================================
--- hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/ListDelegator.java (original)
+++ hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/ListDelegator.java Fri Aug 30 07:28:23 2013
@@ -22,8 +22,9 @@ import java.io.IOException;
 import java.util.List;
 import java.util.ArrayList;
 
+import org.apache.hadoop.hive.shims.HadoopShims.WebHCatJTShim;
+import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.mapred.JobStatus;
-import org.apache.hadoop.mapred.TempletonJobTracker;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hcatalog.templeton.tool.JobState;
 
@@ -39,9 +40,9 @@ public class ListDelegator extends Templ
         throws NotAuthorizedException, BadParam, IOException, InterruptedException {
         
         UserGroupInformation ugi = UserGroupInformation.createRemoteUser(user);
-        TempletonJobTracker tracker = null;
+        WebHCatJTShim tracker = null;
         try {
-            tracker = new TempletonJobTracker(appConf);
+            tracker = ShimLoader.getHadoopShims().getWebHCatShim(appConf);
 
             ArrayList<String> ids = new ArrayList<String>();
 

Modified: hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/StatusDelegator.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/StatusDelegator.java?rev=1518897&r1=1518896&r2=1518897&view=diff
==============================================================================
--- hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/StatusDelegator.java (original)
+++ hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/StatusDelegator.java Fri Aug 30 07:28:23 2013
@@ -22,10 +22,11 @@ import java.io.IOException;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.shims.HadoopShims.WebHCatJTShim;
+import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.mapred.JobID;
 import org.apache.hadoop.mapred.JobProfile;
 import org.apache.hadoop.mapred.JobStatus;
-import org.apache.hadoop.mapred.TempletonJobTracker;
 import org.apache.hcatalog.templeton.tool.JobState;
 
 /**
@@ -41,10 +42,10 @@ public class StatusDelegator extends Tem
     public QueueStatusBean run(String user, String id)
         throws NotAuthorizedException, BadParam, IOException, InterruptedException
     {
-        TempletonJobTracker tracker = null;
+        WebHCatJTShim tracker = null;
         JobState state = null;
         try {
-            tracker = new TempletonJobTracker(appConf);
+            tracker = ShimLoader.getHadoopShims().getWebHCatShim(appConf);
             JobID jobid = StatusDelegator.StringToJobID(id);
             if (jobid == null)
                 throw new BadParam("Invalid jobid: " + id);
@@ -60,7 +61,7 @@ public class StatusDelegator extends Tem
         }
     }
 
-    public static QueueStatusBean makeStatus(TempletonJobTracker tracker,
+    public static QueueStatusBean makeStatus(WebHCatJTShim tracker,
                                              JobID jobid,
                                              String childid,
                                              JobState state)
@@ -87,7 +88,7 @@ public class StatusDelegator extends Tem
         return new QueueStatusBean(state, status, profile);
     }
 
-    public static QueueStatusBean makeStatus(TempletonJobTracker tracker,
+    public static QueueStatusBean makeStatus(WebHCatJTShim tracker,
                                              JobID jobid,
                                              JobState state)
         throws BadParam, IOException {

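For reference, the three Templeton delegators above now obtain their job-tracker handle through the new WebHCatJTShim interface instead of constructing TempletonJobTracker directly, so WebHCat can talk to either a 1.x JobTracker or a 2.x ResourceManager. A hedged sketch of the acquire-use-close pattern; the helper class and method names are illustrative and not taken from the patch:

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.shims.HadoopShims.WebHCatJTShim;
import org.apache.hadoop.hive.shims.ShimLoader;
import org.apache.hadoop.mapred.JobID;
import org.apache.hadoop.mapred.JobStatus;

public class JTShimSketch {
    // Illustrative helper: look up a job's status through the shim and always close the handle,
    // since getWebHCatShim() returns a stateful object that is not cached by the shims.
    public static JobStatus lookupStatus(Configuration conf, String jobIdString) throws IOException {
        WebHCatJTShim tracker = null;
        try {
            tracker = ShimLoader.getHadoopShims().getWebHCatShim(conf);
            JobID jobid = JobID.forName(jobIdString);
            return tracker.getJobStatus(jobid);
        } finally {
            if (tracker != null) {
                tracker.close();
            }
        }
    }
}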
Modified: hive/trunk/shims/src/0.20/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java
URL: http://svn.apache.org/viewvc/hive/trunk/shims/src/0.20/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java?rev=1518897&r1=1518896&r2=1518897&view=diff
==============================================================================
--- hive/trunk/shims/src/0.20/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java (original)
+++ hive/trunk/shims/src/0.20/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java Fri Aug 30 07:28:23 2013
@@ -735,5 +735,12 @@ public class Hadoop20Shims implements Ha
     throw new UnsupportedOperationException(
         "Kerberos not supported in current hadoop version");
   }
-
+  @Override
+  public HCatHadoopShims getHCatShim() {
+      throw new UnsupportedOperationException("HCatalog does not support Hadoop 0.20.x");
+  }
+  @Override
+  public WebHCatJTShim getWebHCatShim(Configuration conf) throws IOException {
+      throw new UnsupportedOperationException("WebHCat does not support Hadoop 0.20.x");
+  }
 }

Modified: hive/trunk/shims/src/0.20S/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java
URL: http://svn.apache.org/viewvc/hive/trunk/shims/src/0.20S/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java?rev=1518897&r1=1518896&r2=1518897&view=diff
==============================================================================
--- hive/trunk/shims/src/0.20S/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java (original)
+++ hive/trunk/shims/src/0.20S/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java Fri Aug 30 07:28:23 2013
@@ -18,21 +18,30 @@
 package org.apache.hadoop.hive.shims;
 
 import java.io.IOException;
+import java.net.InetSocketAddress;
 import java.net.MalformedURLException;
 import java.net.URL;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.filecache.DistributedCache;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.Trash;
-import org.apache.hadoop.hive.shims.HadoopShimsSecure;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.hadoop.mapred.JobTracker;
 import org.apache.hadoop.mapred.MiniMRCluster;
 import org.apache.hadoop.mapred.ClusterStatus;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.TaskLogServlet;
+import org.apache.hadoop.mapred.WebHCatJTShim20S;
 import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.mapreduce.JobContext;
+import org.apache.hadoop.mapreduce.JobID;
+import org.apache.hadoop.mapreduce.JobStatus;
+import org.apache.hadoop.mapreduce.OutputFormat;
+import org.apache.hadoop.mapreduce.TaskAttemptContext;
 import org.apache.hadoop.mapreduce.TaskAttemptID;
+import org.apache.hadoop.mapreduce.TaskID;
 import org.apache.hadoop.util.Progressable;
 import org.apache.hadoop.mapred.lib.TotalOrderPartitioner;
 
@@ -195,4 +204,129 @@ public class Hadoop20SShims extends Hado
       cluster.shutdown();
     }
   }
+  private volatile HCatHadoopShims hcatShimInstance;
+  @Override
+  public HCatHadoopShims getHCatShim() {
+    if(hcatShimInstance == null) {
+      hcatShimInstance = new HCatHadoopShims20S();
+    }
+    return hcatShimInstance;
+  }
+  private final class HCatHadoopShims20S implements HCatHadoopShims {
+    @Override
+    public TaskID createTaskID() {
+      return new TaskID();
+    }
+
+    @Override
+    public TaskAttemptID createTaskAttemptID() {
+      return new TaskAttemptID();
+    }
+
+    @Override
+    public TaskAttemptContext createTaskAttemptContext(Configuration conf, TaskAttemptID taskId) {
+      return new TaskAttemptContext(conf, taskId);
+    }
+
+    @Override
+    public org.apache.hadoop.mapred.TaskAttemptContext createTaskAttemptContext(org.apache.hadoop.mapred.JobConf conf,
+                 org.apache.hadoop.mapred.TaskAttemptID taskId, Progressable progressable) {
+      org.apache.hadoop.mapred.TaskAttemptContext newContext = null;
+      try {
+        java.lang.reflect.Constructor construct = org.apache.hadoop.mapred.TaskAttemptContext.class.getDeclaredConstructor(
+                org.apache.hadoop.mapred.JobConf.class, org.apache.hadoop.mapred.TaskAttemptID.class,
+                Progressable.class);
+        construct.setAccessible(true);
+        newContext = (org.apache.hadoop.mapred.TaskAttemptContext)construct.newInstance(conf, taskId, progressable);
+      } catch (Exception e) {
+        throw new RuntimeException(e);
+      }
+      return newContext;
+    }
+
+    @Override
+    public JobContext createJobContext(Configuration conf,
+                                       JobID jobId) {
+      return new JobContext(conf, jobId);
+    }
+
+    @Override
+    public org.apache.hadoop.mapred.JobContext createJobContext(org.apache.hadoop.mapred.JobConf conf,
+                                   org.apache.hadoop.mapreduce.JobID jobId, Progressable progressable) {
+      org.apache.hadoop.mapred.JobContext newContext = null;
+      try {
+        java.lang.reflect.Constructor construct = org.apache.hadoop.mapred.JobContext.class.getDeclaredConstructor(
+                org.apache.hadoop.mapred.JobConf.class, org.apache.hadoop.mapreduce.JobID.class,
+                Progressable.class);
+        construct.setAccessible(true);
+        newContext = (org.apache.hadoop.mapred.JobContext)construct.newInstance(conf, jobId, progressable);
+      } catch (Exception e) {
+        throw new RuntimeException(e);
+      }
+      return newContext;
+    }
+
+    @Override
+    public void commitJob(OutputFormat outputFormat, Job job) throws IOException {
+      if( job.getConfiguration().get("mapred.job.tracker", "").equalsIgnoreCase("local") ) {
+        try {
+          //In local mode, mapreduce will not call OutputCommitter.cleanupJob.
+          //Calling it from here so that the partition publish happens.
+          //This call needs to be removed after MAPREDUCE-1447 is fixed.
+          outputFormat.getOutputCommitter(createTaskAttemptContext(
+                  job.getConfiguration(), createTaskAttemptID())).commitJob(job);
+        } catch (IOException e) {
+          throw new IOException("Failed to cleanup job",e);
+        } catch (InterruptedException e) {
+          throw new IOException("Failed to cleanup job",e);
+        }
+      }
+    }
+
+    @Override
+    public void abortJob(OutputFormat outputFormat, Job job) throws IOException {
+      if (job.getConfiguration().get("mapred.job.tracker", "")
+              .equalsIgnoreCase("local")) {
+        try {
+          // This call needs to be removed after MAPREDUCE-1447 is fixed.
+          outputFormat.getOutputCommitter(createTaskAttemptContext(
+                  job.getConfiguration(), new TaskAttemptID())).abortJob(job, JobStatus.State.FAILED);
+        } catch (IOException e) {
+          throw new IOException("Failed to abort job", e);
+        } catch (InterruptedException e) {
+          throw new IOException("Failed to abort job", e);
+        }
+      }
+    }
+
+    @Override
+    public InetSocketAddress getResourceManagerAddress(Configuration conf)
+    {
+      return JobTracker.getAddress(conf);
+    }
+
+    @Override
+    public String getPropertyName(PropertyName name) {
+      switch (name) {
+        case CACHE_ARCHIVES:
+          return DistributedCache.CACHE_ARCHIVES;
+        case CACHE_FILES:
+          return DistributedCache.CACHE_FILES;
+        case CACHE_SYMLINK:
+          return DistributedCache.CACHE_SYMLINK;
+      }
+
+      return "";
+    }
+
+    @Override
+    public boolean isFileInHDFS(FileSystem fs, Path path) throws IOException {
+      // In hadoop 1.x.x the file system URI is sufficient to determine the uri of the file
+      return "hdfs".equals(fs.getUri().getScheme());
+    }
+  }
+  @Override
+  public WebHCatJTShim getWebHCatShim(Configuration conf) throws IOException {
+    return new WebHCatJTShim20S(conf);//this has state, so can't be cached
+  }
 }

Modified: hive/trunk/shims/src/0.23/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java
URL: http://svn.apache.org/viewvc/hive/trunk/shims/src/0.23/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java?rev=1518897&r1=1518896&r2=1518897&view=diff
==============================================================================
--- hive/trunk/shims/src/0.23/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java (original)
+++ hive/trunk/shims/src/0.23/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java Fri Aug 30 07:28:23 2013
@@ -19,6 +19,7 @@ package org.apache.hadoop.hive.shims;
 
 import java.io.IOException;
 import java.lang.Integer;
+import java.net.InetSocketAddress;
 import java.net.MalformedURLException;
 import java.net.URL;
 import java.util.Map;
@@ -28,17 +29,24 @@ import org.apache.hadoop.conf.Configurat
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.Trash;
-import org.apache.hadoop.hive.shims.HadoopShims.JobTrackerState;
-import org.apache.hadoop.hive.shims.HadoopShimsSecure;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.mapred.MiniMRCluster;
 import org.apache.hadoop.mapred.ClusterStatus;
 import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.Reporter;
+import org.apache.hadoop.mapred.WebHCatJTShim23;
 import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.mapreduce.JobContext;
+import org.apache.hadoop.mapreduce.JobID;
+import org.apache.hadoop.mapreduce.MRJobConfig;
+import org.apache.hadoop.mapreduce.OutputFormat;
 import org.apache.hadoop.mapreduce.TaskAttemptID;
+import org.apache.hadoop.mapreduce.TaskID;
+import org.apache.hadoop.mapreduce.TaskType;
 import org.apache.hadoop.mapreduce.task.JobContextImpl;
 import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;
 import org.apache.hadoop.mapreduce.util.HostUtil;
+import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.util.Progressable;
 import org.apache.hadoop.mapred.lib.TotalOrderPartitioner;
 
@@ -230,4 +238,104 @@ public class Hadoop23Shims extends Hadoo
       cluster.shutdown();
     }
   }
+  private volatile HCatHadoopShims hcatShimInstance;
+  @Override
+  public HCatHadoopShims getHCatShim() {
+    if(hcatShimInstance == null) {
+      hcatShimInstance = new HCatHadoopShims23();
+    }
+    return hcatShimInstance;
+  }
+  private final class HCatHadoopShims23 implements HCatHadoopShims {
+    @Override
+    public TaskID createTaskID() {
+      return new TaskID("", 0, TaskType.MAP, 0);
+    }
+
+    @Override
+    public TaskAttemptID createTaskAttemptID() {
+      return new TaskAttemptID("", 0, TaskType.MAP, 0, 0);
+    }
+
+    @Override
+    public org.apache.hadoop.mapreduce.TaskAttemptContext createTaskAttemptContext(Configuration conf,
+                                                                                   org.apache.hadoop.mapreduce.TaskAttemptID taskId) {
+      return new org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl(
+              conf instanceof JobConf? new JobConf(conf) : conf,
+              taskId);
+    }
+
+    @Override
+    public org.apache.hadoop.mapred.TaskAttemptContext createTaskAttemptContext(org.apache.hadoop.mapred.JobConf conf,
+                                                                                org.apache.hadoop.mapred.TaskAttemptID taskId, Progressable progressable) {
+      org.apache.hadoop.mapred.TaskAttemptContext newContext = null;
+      try {
+        java.lang.reflect.Constructor construct = org.apache.hadoop.mapred.TaskAttemptContextImpl.class.getDeclaredConstructor(
+                org.apache.hadoop.mapred.JobConf.class, org.apache.hadoop.mapred.TaskAttemptID.class,
+                Reporter.class);
+        construct.setAccessible(true);
+        newContext = (org.apache.hadoop.mapred.TaskAttemptContext) construct.newInstance(
+                new JobConf(conf), taskId, (Reporter) progressable);
+      } catch (Exception e) {
+        throw new RuntimeException(e);
+      }
+      return newContext;
+    }
+
+    @Override
+    public JobContext createJobContext(Configuration conf,
+                                       JobID jobId) {
+      return new JobContextImpl(conf instanceof JobConf? new JobConf(conf) : conf,
+              jobId);
+    }
+
+    @Override
+    public org.apache.hadoop.mapred.JobContext createJobContext(org.apache.hadoop.mapred.JobConf conf,
+                                                                org.apache.hadoop.mapreduce.JobID jobId, Progressable progressable) {
+      return new org.apache.hadoop.mapred.JobContextImpl(
+              new JobConf(conf), jobId, (org.apache.hadoop.mapred.Reporter) progressable);
+    }
+
+    @Override
+    public void commitJob(OutputFormat outputFormat, Job job) throws IOException {
+      // Do nothing as this was fixed by MAPREDUCE-1447.
+    }
+
+    @Override
+    public void abortJob(OutputFormat outputFormat, Job job) throws IOException {
+      // Do nothing as this was fixed by MAPREDUCE-1447.
+    }
+
+    @Override
+    public InetSocketAddress getResourceManagerAddress(Configuration conf) {
+      String addr = conf.get("yarn.resourcemanager.address", "localhost:8032");
+
+      return NetUtils.createSocketAddr(addr);
+    }
+
+    @Override
+    public String getPropertyName(PropertyName name) {
+      switch (name) {
+        case CACHE_ARCHIVES:
+          return MRJobConfig.CACHE_ARCHIVES;
+        case CACHE_FILES:
+          return MRJobConfig.CACHE_FILES;
+        case CACHE_SYMLINK:
+          return MRJobConfig.CACHE_SYMLINK;
+      }
+
+      return "";
+    }
+
+    @Override
+    public boolean isFileInHDFS(FileSystem fs, Path path) throws IOException {
+      // In case of viewfs we need to lookup where the actual file is to know the filesystem in use.
+      // resolvePath is a sure shot way of knowing which file system the file is.
+      return "hdfs".equals(fs.resolvePath(path).toUri().getScheme());
+    }
+  }
+  @Override
+  public WebHCatJTShim getWebHCatShim(Configuration conf) throws IOException {
+    return new WebHCatJTShim23(conf);//this has state, so can't be cached
+  }
 }

Modified: hive/trunk/shims/src/common/java/org/apache/hadoop/hive/shims/HadoopShims.java
URL: http://svn.apache.org/viewvc/hive/trunk/shims/src/common/java/org/apache/hadoop/hive/shims/HadoopShims.java?rev=1518897&r1=1518896&r2=1518897&view=diff
==============================================================================
--- hive/trunk/shims/src/common/java/org/apache/hadoop/hive/shims/HadoopShims.java (original)
+++ hive/trunk/shims/src/common/java/org/apache/hadoop/hive/shims/HadoopShims.java Fri Aug 30 07:28:23 2013
@@ -20,6 +20,7 @@ package org.apache.hadoop.hive.shims;
 import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.IOException;
+import java.net.InetSocketAddress;
 import java.net.MalformedURLException;
 import java.net.URI;
 import java.net.URISyntaxException;
@@ -40,13 +41,19 @@ import org.apache.hadoop.mapred.ClusterS
 import org.apache.hadoop.mapred.InputFormat;
 import org.apache.hadoop.mapred.InputSplit;
 import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.JobProfile;
+import org.apache.hadoop.mapred.JobStatus;
 import org.apache.hadoop.mapred.RecordReader;
 import org.apache.hadoop.mapred.Reporter;
 import org.apache.hadoop.mapred.RunningJob;
 import org.apache.hadoop.mapred.TaskCompletionEvent;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.JobContext;
+import org.apache.hadoop.mapreduce.JobID;
+import org.apache.hadoop.mapreduce.OutputFormat;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
+import org.apache.hadoop.mapreduce.TaskAttemptID;
+import org.apache.hadoop.mapreduce.TaskID;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.util.Progressable;
 
@@ -461,4 +468,72 @@ public interface HadoopShims {
         Class<RecordReader<K, V>> rrClass) throws IOException;
   }
 
+  public HCatHadoopShims getHCatShim();
+  public interface HCatHadoopShims {
+
+    enum PropertyName {CACHE_ARCHIVES, CACHE_FILES, CACHE_SYMLINK}
+
+    public TaskID createTaskID();
+
+    public TaskAttemptID createTaskAttemptID();
+
+    public org.apache.hadoop.mapreduce.TaskAttemptContext createTaskAttemptContext(Configuration conf,
+                                                                                   TaskAttemptID taskId);
+
+    public org.apache.hadoop.mapred.TaskAttemptContext createTaskAttemptContext(JobConf conf,
+                                                                                org.apache.hadoop.mapred.TaskAttemptID taskId, Progressable progressable);
+
+    public JobContext createJobContext(Configuration conf, JobID jobId);
+
+    public org.apache.hadoop.mapred.JobContext createJobContext(JobConf conf, JobID jobId, Progressable progressable);
+
+    public void commitJob(OutputFormat outputFormat, Job job) throws IOException;
+
+    public void abortJob(OutputFormat outputFormat, Job job) throws IOException;
+
+    /* Referring to job tracker in 0.20 and resource manager in 0.23 */
+    public InetSocketAddress getResourceManagerAddress(Configuration conf);
+
+    public String getPropertyName(PropertyName name);
+
+    /**
+     * Checks if file is in HDFS filesystem.
+     *
+     * @param fs
+     * @param path
+     * @return true if the file is in HDFS, false if the file is in other file systems.
+     */
+    public boolean isFileInHDFS(FileSystem fs, Path path) throws IOException;
+  }
+  /**
+   * Provides a Hadoop JobTracker shim.
+   * @param conf not {@code null}
+   */
+  public WebHCatJTShim getWebHCatShim(Configuration conf) throws IOException;
+  public interface WebHCatJTShim {
+    /**
+     * Grab a handle to a job that is already known to the JobTracker.
+     *
+     * @return Profile of the job, or null if not found.
+     */
+    public JobProfile getJobProfile(org.apache.hadoop.mapred.JobID jobid) throws IOException;
+    /**
+     * Grab a handle to a job that is already known to the JobTracker.
+     *
+     * @return Status of the job, or null if not found.
+     */
+    public JobStatus getJobStatus(org.apache.hadoop.mapred.JobID jobid) throws IOException;
+    /**
+     * Kill a job.
+     */
+    public void killJob(org.apache.hadoop.mapred.JobID jobid) throws IOException;
+    /**
+     * Get all the jobs submitted.
+     */
+    public JobStatus[] getAllJobs() throws IOException;
+    /**
+     * Close the connection to the Job Tracker.
+     */
+    public void close();
+  }
 }