Posted to commits@hive.apache.org by gu...@apache.org on 2013/08/30 21:46:17 UTC

svn commit: r1519056 [1/3] - in /hive/branches/tez: ./ common/src/java/org/apache/hadoop/hive/conf/ conf/ hbase-handler/src/test/queries/positive/ hbase-handler/src/test/results/positive/ hcatalog/ hcatalog/build-support/ant/ hcatalog/core/ hcatalog/co...

Author: gunther
Date: Fri Aug 30 19:46:15 2013
New Revision: 1519056

URL: http://svn.apache.org/r1519056
Log:
Merge latest trunk into branch (Gunther Hagleitner)

Added:
    hive/branches/tez/hbase-handler/src/test/queries/positive/hbase_single_sourced_multi_insert.q
      - copied unchanged from r1519052, hive/trunk/hbase-handler/src/test/queries/positive/hbase_single_sourced_multi_insert.q
    hive/branches/tez/hbase-handler/src/test/results/positive/hbase_single_sourced_multi_insert.q.out
      - copied unchanged from r1519052, hive/trunk/hbase-handler/src/test/results/positive/hbase_single_sourced_multi_insert.q.out
    hive/branches/tez/hcatalog/src/test/e2e/templeton/tests/doas.conf
      - copied unchanged from r1519052, hive/trunk/hcatalog/src/test/e2e/templeton/tests/doas.conf
    hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/ProxyUserSupport.java
      - copied unchanged from r1519052, hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/ProxyUserSupport.java
    hive/branches/tez/metastore/src/test/org/apache/hadoop/hive/metastore/VerifyingObjectStore.java
      - copied unchanged from r1519052, hive/trunk/metastore/src/test/org/apache/hadoop/hive/metastore/VerifyingObjectStore.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/TopNHash.java
      - copied unchanged from r1519052, hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/TopNHash.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/optimizer/LimitPushdownOptimizer.java
      - copied unchanged from r1519052, hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/LimitPushdownOptimizer.java
    hive/branches/tez/ql/src/test/queries/clientpositive/limit_pushdown.q
      - copied unchanged from r1519052, hive/trunk/ql/src/test/queries/clientpositive/limit_pushdown.q
    hive/branches/tez/ql/src/test/queries/clientpositive/limit_pushdown_negative.q
      - copied unchanged from r1519052, hive/trunk/ql/src/test/queries/clientpositive/limit_pushdown_negative.q
    hive/branches/tez/ql/src/test/results/clientpositive/limit_pushdown.q.out
      - copied unchanged from r1519052, hive/trunk/ql/src/test/results/clientpositive/limit_pushdown.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/limit_pushdown_negative.q.out
      - copied unchanged from r1519052, hive/trunk/ql/src/test/results/clientpositive/limit_pushdown_negative.q.out
    hive/branches/tez/shims/src/0.20S/java/org/apache/hadoop/mapred/
      - copied from r1519052, hive/trunk/shims/src/0.20S/java/org/apache/hadoop/mapred/
    hive/branches/tez/shims/src/0.23/java/org/apache/hadoop/mapred/
      - copied from r1519052, hive/trunk/shims/src/0.23/java/org/apache/hadoop/mapred/
Removed:
    hive/branches/tez/hcatalog/core/src/main/java/org/apache/hcatalog/shims/HCatHadoopShims.java
    hive/branches/tez/hcatalog/shims/src/20/java/org/apache/hadoop/mapred/TempletonJobTracker.java
    hive/branches/tez/hcatalog/shims/src/20/java/org/apache/hcatalog/shims/HCatHadoopShims20S.java
    hive/branches/tez/hcatalog/shims/src/23/java/org/apache/hadoop/mapred/TempletonJobTracker.java
    hive/branches/tez/hcatalog/shims/src/23/java/org/apache/hcatalog/shims/HCatHadoopShims23.java
Modified:
    hive/branches/tez/   (props changed)
    hive/branches/tez/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
    hive/branches/tez/conf/hive-default.xml.template
    hive/branches/tez/hcatalog/build-support/ant/deploy.xml
    hive/branches/tez/hcatalog/build.properties
    hive/branches/tez/hcatalog/build.xml
    hive/branches/tez/hcatalog/core/build.xml
    hive/branches/tez/hcatalog/core/src/main/java/org/apache/hadoop/mapred/HCatMapRedUtil.java
    hive/branches/tez/hcatalog/core/src/main/java/org/apache/hcatalog/data/transfer/impl/HCatInputFormatReader.java
    hive/branches/tez/hcatalog/core/src/main/java/org/apache/hcatalog/data/transfer/impl/HCatOutputFormatWriter.java
    hive/branches/tez/hcatalog/core/src/main/java/org/apache/hcatalog/mapreduce/FileOutputCommitterContainer.java
    hive/branches/tez/hcatalog/core/src/main/java/org/apache/hcatalog/mapreduce/HCatOutputFormat.java
    hive/branches/tez/hcatalog/core/src/main/java/org/apache/hcatalog/mapreduce/MultiOutputFormat.java
    hive/branches/tez/hcatalog/core/src/main/java/org/apache/hcatalog/mapreduce/Security.java
    hive/branches/tez/hcatalog/core/src/test/java/org/apache/hcatalog/rcfile/TestRCFileMapReduceInputFormat.java
    hive/branches/tez/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hcatalog/pig/HCatStorer.java
    hive/branches/tez/hcatalog/src/docs/src/documentation/content/xdocs/configuration.xml
    hive/branches/tez/hcatalog/src/test/e2e/templeton/README.txt
    hive/branches/tez/hcatalog/src/test/e2e/templeton/build.xml
    hive/branches/tez/hcatalog/src/test/e2e/templeton/drivers/TestDriverCurl.pm
    hive/branches/tez/hcatalog/webhcat/svr/pom.xml
    hive/branches/tez/hcatalog/webhcat/svr/src/main/config/webhcat-default.xml
    hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/AppConfig.java
    hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/DeleteDelegator.java
    hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/LauncherDelegator.java
    hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/ListDelegator.java
    hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/Server.java
    hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/StatusDelegator.java
    hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/UgiFactory.java
    hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/tool/HDFSStorage.java
    hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/tool/NotFoundException.java
    hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/tool/TempletonUtils.java
    hive/branches/tez/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreDirectSql.java
    hive/branches/tez/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
    hive/branches/tez/metastore/src/java/org/apache/hadoop/hive/metastore/RetryingRawStore.java
    hive/branches/tez/ql/build.xml
    hive/branches/tez/ql/ivy.xml
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/ExtractOperator.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/ForwardOperator.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/GroupByOperator.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/HashTableSinkOperator.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/PTFOperator.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/ReduceSinkOperator.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/SelectOperator.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/io/HiveKey.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcFile.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcOutputFormat.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/io/orc/ReaderImpl.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/io/orc/WriterImpl.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRFileSink1.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/optimizer/Optimizer.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/PTFTranslator.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/WindowingSpec.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/PTFDesc.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/PTFDeserializer.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/ReduceSinkDesc.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/WindowingTableFunction.java
    hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java
    hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestNewIntegerEncoding.java
    hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcFile.java
    hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcNullOptimization.java
    hive/branches/tez/ql/src/test/resources/orc-file-dump.out
    hive/branches/tez/service/src/java/org/apache/hive/service/cli/session/HiveSessionHookContext.java
    hive/branches/tez/service/src/java/org/apache/hive/service/cli/session/HiveSessionHookContextImpl.java
    hive/branches/tez/service/src/java/org/apache/hive/service/cli/session/SessionManager.java
    hive/branches/tez/shims/src/0.20/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java
    hive/branches/tez/shims/src/0.20S/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java
    hive/branches/tez/shims/src/0.23/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java
    hive/branches/tez/shims/src/common/java/org/apache/hadoop/hive/shims/HadoopShims.java

Propchange: hive/branches/tez/
------------------------------------------------------------------------------
  Merged /hive/trunk:r1517698-1519052

Modified: hive/branches/tez/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java?rev=1519056&r1=1519055&r2=1519056&view=diff
==============================================================================
--- hive/branches/tez/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java (original)
+++ hive/branches/tez/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java Fri Aug 30 19:46:15 2013
@@ -505,9 +505,8 @@ public class HiveConf extends Configurat
 
     // Maximum fraction of heap that can be used by ORC file writers
     HIVE_ORC_FILE_MEMORY_POOL("hive.exec.orc.memory.pool", 0.5f), // 50%
-    // use 0.11 version of RLE encoding. if this conf is not defined or any
-    // other value specified, ORC will use the new RLE encoding
-    HIVE_ORC_WRITE_FORMAT("hive.exec.orc.write.format", "0.11"),
+    // Define the version of the file to write
+    HIVE_ORC_WRITE_FORMAT("hive.exec.orc.write.format", null),
 
     HIVE_ORC_DICTIONARY_KEY_SIZE_THRESHOLD("hive.exec.orc.dictionary.key.size.threshold", 0.8f),
 
@@ -525,6 +524,8 @@ public class HiveConf extends Configurat
     HIVELIMITOPTLIMITFILE("hive.limit.optimize.limit.file", 10),
     HIVELIMITOPTENABLE("hive.limit.optimize.enable", false),
     HIVELIMITOPTMAXFETCH("hive.limit.optimize.fetch.max", 50000),
+    HIVELIMITPUSHDOWNMEMORYUSAGE("hive.limit.pushdown.memory.usage", -1f),
+
     HIVEHASHTABLETHRESHOLD("hive.hashtable.initialCapacity", 100000),
     HIVEHASHTABLELOADFACTOR("hive.hashtable.loadfactor", (float) 0.75),
     HIVEHASHTABLEFOLLOWBYGBYMAXMEMORYUSAGE("hive.mapjoin.followby.gby.localtask.max.memory.usage", (float) 0.55),
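
A hedged, stand-alone illustration (not part of this commit) of how the new HIVELIMITPUSHDOWNMEMORYUSAGE entry above would typically be read, assuming the standard HiveConf float accessor; the -1f default is taken here to mean "limit pushdown disabled":

    import org.apache.hadoop.hive.conf.HiveConf;

    public class LimitPushdownConfigSketch {
        public static void main(String[] args) {
            HiveConf conf = new HiveConf();
            // Illustrative only: read the new top-K (limit pushdown) memory fraction.
            float fraction = HiveConf.getFloatVar(conf, HiveConf.ConfVars.HIVELIMITPUSHDOWNMEMORYUSAGE);
            if (fraction > 0) {
                System.out.println("ReduceSink top-K hash may use up to " + (fraction * 100) + "% of memory.");
            } else {
                System.out.println("Limit pushdown disabled (non-positive fraction).");
            }
        }
    }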

Modified: hive/branches/tez/conf/hive-default.xml.template
URL: http://svn.apache.org/viewvc/hive/branches/tez/conf/hive-default.xml.template?rev=1519056&r1=1519055&r2=1519056&view=diff
==============================================================================
--- hive/branches/tez/conf/hive-default.xml.template (original)
+++ hive/branches/tez/conf/hive-default.xml.template Fri Aug 30 19:46:15 2013
@@ -431,19 +431,19 @@
 <property>
   <name>hive.mapjoin.followby.map.aggr.hash.percentmemory</name>
   <value>0.3</value>
-  <description>Portion of total memory to be used by map-side grup aggregation hash table, when this group by is followed by map join</description>
+  <description>Portion of total memory to be used by map-side group aggregation hash table, when this group by is followed by map join</description>
 </property>
 
 <property>
   <name>hive.map.aggr.hash.force.flush.memory.threshold</name>
   <value>0.9</value>
-  <description>The max memory to be used by map-side grup aggregation hash table, if the memory usage is higher than this number, force to flush data</description>
+  <description>The max memory to be used by map-side group aggregation hash table, if the memory usage is higher than this number, force to flush data</description>
 </property>
 
 <property>
   <name>hive.map.aggr.hash.percentmemory</name>
   <value>0.5</value>
-  <description>Portion of total memory to be used by map-side grup aggregation hash table</description>
+  <description>Portion of total memory to be used by map-side group aggregation hash table</description>
 </property>
 
 <property>
@@ -1584,6 +1584,12 @@
 </property>
 
 <property>
+  <name>hive.limit.pushdown.memory.usage</name>
+  <value>0.3f</value>
+  <description>The max memory to be used for hash in RS operator for top K selection.</description>
+</property>
+
+<property>
   <name>hive.rework.mapredwork</name>
   <value>false</value>
   <description>should rework the mapred work or not.

Modified: hive/branches/tez/hcatalog/build-support/ant/deploy.xml
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/build-support/ant/deploy.xml?rev=1519056&r1=1519055&r2=1519056&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/build-support/ant/deploy.xml (original)
+++ hive/branches/tez/hcatalog/build-support/ant/deploy.xml Fri Aug 30 19:46:15 2013
@@ -69,7 +69,7 @@
       <_mvnpublish module="testutils" />
     </target>
 
-    <target name="mvn-init" unless="mvn-init.complete" description="Get Maven Ant Tasts jar and deploy all Hive jars to local Maven repo">
+    <target name="mvn-init" unless="mvn-init.complete" description="Get Maven Ant Tasks jar and deploy all Hive jars to local Maven repo">
         <echo message="${ant.project.name}"/>
         <get src="${mvnrepo}/org/apache/maven/maven-ant-tasks/${maven-ant-tasks.version}/maven-ant-tasks-${maven-ant-tasks.version}.jar"
              dest="${path.to.basedir}/build/maven-ant-tasks-${maven-ant-tasks.version}.jar"

Modified: hive/branches/tez/hcatalog/build.properties
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/build.properties?rev=1519056&r1=1519055&r2=1519056&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/build.properties (original)
+++ hive/branches/tez/hcatalog/build.properties Fri Aug 30 19:46:15 2013
@@ -66,12 +66,6 @@ javac.version=1.6
 javac.args=
 javac.args.warnings=
 
-# hive properties
-#shims.name=20
-shims.20S.hive.shims.include=0.20,0.20S
-shims.20S.hadoop.version=${hive.hadoop-0.20S.version}
-shims.23.hive.shims.include=0.23
-shims.23.hadoop.version=${hive.hadoop-0.23.version}
 
 ###############################################################################
 # deploy properties

Modified: hive/branches/tez/hcatalog/build.xml
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/build.xml?rev=1519056&r1=1519055&r2=1519056&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/build.xml (original)
+++ hive/branches/tez/hcatalog/build.xml Fri Aug 30 19:46:15 2013
@@ -186,7 +186,6 @@
         <ant target="clean" dir="webhcat/svr" inheritAll="false"/>
         <ant target="clean" dir="webhcat/java-client" inheritAll="false"/>
         <ant target="clean" dir="storage-handlers/hbase" inheritAll="false"/>
-        <ant target="clean" dir="shims" inheritAll="false"/>
     </target>
 
     <!-- Clean up children -->
@@ -200,7 +199,6 @@
         <ant target="clean-test" dir="webhcat/svr" inheritAll="false"/>
         <ant target="clean-test" dir="webhcat/java-client" inheritAll="false"/>
         <ant target="clean-test" dir="storage-handlers/hbase" inheritAll="false"/>
-        <ant target="clean-test" dir="shims" inheritAll="false"/>
     </target>
 
     <!--
@@ -480,7 +478,6 @@
                 <include name="server-extensions/**"/>
                 <include name="webhcat/**"/>
                 <include name="license/**"/>
-                <include name="shims/**"/>
                 <include name="src/**"/>
                 <include name="storage-handlers/**"/>
                 <include name="*.properties"/>

Modified: hive/branches/tez/hcatalog/core/build.xml
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/core/build.xml?rev=1519056&r1=1519055&r2=1519056&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/core/build.xml (original)
+++ hive/branches/tez/hcatalog/core/build.xml Fri Aug 30 19:46:15 2013
@@ -39,15 +39,4 @@
   <path id="findbugs.class.path">
     <fileset dir="${build.dir}/lib/compile"/>
   </path>
-
-  <target name="compile">
-    <echo message="${ant.project.name}"/>
-    <_javac srcDir="${basedir}/src/main/java"
-            destDir="${build.classes}"
-            classPathRef="compile.class.path"/>
-    <ant target="jar" dir="${path.to.basedir}/shims" inheritAll="false">
-        <property name="_mvn.hadoop.profile" value="${mvn.hadoop.profile}"/>
-    </ant>
-  </target>
-
 </project>

Modified: hive/branches/tez/hcatalog/core/src/main/java/org/apache/hadoop/mapred/HCatMapRedUtil.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/core/src/main/java/org/apache/hadoop/mapred/HCatMapRedUtil.java?rev=1519056&r1=1519055&r2=1519056&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/core/src/main/java/org/apache/hadoop/mapred/HCatMapRedUtil.java (original)
+++ hive/branches/tez/hcatalog/core/src/main/java/org/apache/hadoop/mapred/HCatMapRedUtil.java Fri Aug 30 19:46:15 2013
@@ -19,9 +19,9 @@
 
 package org.apache.hadoop.mapred;
 
+import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.util.Progressable;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hcatalog.shims.HCatHadoopShims;
 
 public class HCatMapRedUtil {
 
@@ -32,11 +32,11 @@ public class HCatMapRedUtil {
     }
 
     public static org.apache.hadoop.mapreduce.TaskAttemptContext createTaskAttemptContext(Configuration conf, org.apache.hadoop.mapreduce.TaskAttemptID id) {
-        return  HCatHadoopShims.Instance.get().createTaskAttemptContext(conf,id);
+        return ShimLoader.getHadoopShims().getHCatShim().createTaskAttemptContext(conf,id);
     }
 
     public static TaskAttemptContext createTaskAttemptContext(JobConf conf, TaskAttemptID id, Progressable progressable) {
-        return HCatHadoopShims.Instance.get ().createTaskAttemptContext(conf, id, (Reporter) progressable);
+        return ShimLoader.getHadoopShims().getHCatShim().createTaskAttemptContext(conf, id, (Reporter) progressable);
     }
 
     public static org.apache.hadoop.mapred.JobContext createJobContext(org.apache.hadoop.mapreduce.JobContext context) {
@@ -46,6 +46,6 @@ public class HCatMapRedUtil {
     }
 
     public static JobContext createJobContext(JobConf conf, org.apache.hadoop.mapreduce.JobID id, Progressable progressable) {
-        return HCatHadoopShims.Instance.get ().createJobContext(conf, id, (Reporter) progressable);
+        return ShimLoader.getHadoopShims().getHCatShim().createJobContext(conf, id, (Reporter) progressable);
     }
 }
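
The same substitution recurs in the HCatalog files below: every HCatHadoopShims.Instance.get() call is routed through ShimLoader instead. A minimal sketch of the new call pattern, with an illustrative helper class that is not part of the commit:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hive.shims.HadoopShims;
    import org.apache.hadoop.hive.shims.ShimLoader;
    import org.apache.hadoop.mapreduce.TaskAttemptContext;
    import org.apache.hadoop.mapreduce.TaskAttemptID;

    public class HCatShimUsageSketch {
        public static TaskAttemptContext newTaskAttemptContext(Configuration conf) {
            // Resolve the Hadoop-version-specific shim at runtime, then ask it
            // for the HCatalog-specific helper that replaces HCatHadoopShims.
            HadoopShims.HCatHadoopShims hcatShim = ShimLoader.getHadoopShims().getHCatShim();
            return hcatShim.createTaskAttemptContext(conf, new TaskAttemptID());
        }
    }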

Modified: hive/branches/tez/hcatalog/core/src/main/java/org/apache/hcatalog/data/transfer/impl/HCatInputFormatReader.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/core/src/main/java/org/apache/hcatalog/data/transfer/impl/HCatInputFormatReader.java?rev=1519056&r1=1519055&r2=1519056&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/core/src/main/java/org/apache/hcatalog/data/transfer/impl/HCatInputFormatReader.java (original)
+++ hive/branches/tez/hcatalog/core/src/main/java/org/apache/hcatalog/data/transfer/impl/HCatInputFormatReader.java Fri Aug 30 19:46:15 2013
@@ -24,6 +24,7 @@ import java.util.Iterator;
 import java.util.Map;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.io.WritableComparable;
 import org.apache.hadoop.mapreduce.InputSplit;
 import org.apache.hadoop.mapreduce.Job;
@@ -38,7 +39,6 @@ import org.apache.hcatalog.data.transfer
 import org.apache.hcatalog.data.transfer.ReaderContext;
 import org.apache.hcatalog.data.transfer.state.StateProvider;
 import org.apache.hcatalog.mapreduce.HCatInputFormat;
-import org.apache.hcatalog.shims.HCatHadoopShims;
 
 /**
  * This reader reads via {@link HCatInputFormat}
@@ -66,7 +66,7 @@ public class HCatInputFormatReader exten
                 job, re.getDbName(), re.getTableName()).setFilter(re.getFilterString());
             ReaderContext cntxt = new ReaderContext();
             cntxt.setInputSplits(hcif.getSplits(
-                HCatHadoopShims.Instance.get().createJobContext(job.getConfiguration(), null)));
+                    ShimLoader.getHadoopShims().getHCatShim().createJobContext(job.getConfiguration(), null)));
             cntxt.setConf(job.getConfiguration());
             return cntxt;
         } catch (IOException e) {
@@ -82,7 +82,7 @@ public class HCatInputFormatReader exten
         HCatInputFormat inpFmt = new HCatInputFormat();
         RecordReader<WritableComparable, HCatRecord> rr;
         try {
-            TaskAttemptContext cntxt = HCatHadoopShims.Instance.get().createTaskAttemptContext(conf, new TaskAttemptID());
+            TaskAttemptContext cntxt = ShimLoader.getHadoopShims().getHCatShim().createTaskAttemptContext(conf, new TaskAttemptID());
             rr = inpFmt.createRecordReader(split, cntxt);
             rr.initialize(split, cntxt);
         } catch (IOException e) {

Modified: hive/branches/tez/hcatalog/core/src/main/java/org/apache/hcatalog/data/transfer/impl/HCatOutputFormatWriter.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/core/src/main/java/org/apache/hcatalog/data/transfer/impl/HCatOutputFormatWriter.java?rev=1519056&r1=1519055&r2=1519056&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/core/src/main/java/org/apache/hcatalog/data/transfer/impl/HCatOutputFormatWriter.java (original)
+++ hive/branches/tez/hcatalog/core/src/main/java/org/apache/hcatalog/data/transfer/impl/HCatOutputFormatWriter.java Fri Aug 30 19:46:15 2013
@@ -24,6 +24,7 @@ import java.util.Iterator;
 import java.util.Map;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.io.WritableComparable;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.JobStatus.State;
@@ -40,7 +41,6 @@ import org.apache.hcatalog.data.transfer
 import org.apache.hcatalog.data.transfer.state.StateProvider;
 import org.apache.hcatalog.mapreduce.HCatOutputFormat;
 import org.apache.hcatalog.mapreduce.OutputJobInfo;
-import org.apache.hcatalog.shims.HCatHadoopShims;
 
 /**
  * This writer writes via {@link HCatOutputFormat}
@@ -67,8 +67,8 @@ public class HCatOutputFormatWriter exte
             HCatOutputFormat.setSchema(job, HCatOutputFormat.getTableSchema(job));
             HCatOutputFormat outFormat = new HCatOutputFormat();
             outFormat.checkOutputSpecs(job);
-            outFormat.getOutputCommitter(HCatHadoopShims.Instance.get().createTaskAttemptContext
-                (job.getConfiguration(), HCatHadoopShims.Instance.get().createTaskAttemptID())).setupJob(job);
+            outFormat.getOutputCommitter(ShimLoader.getHadoopShims().getHCatShim().createTaskAttemptContext(
+                    job.getConfiguration(), ShimLoader.getHadoopShims().getHCatShim().createTaskAttemptID())).setupJob(job);
         } catch (IOException e) {
             throw new HCatException(ErrorType.ERROR_NOT_INITIALIZED, e);
         } catch (InterruptedException e) {
@@ -85,8 +85,8 @@ public class HCatOutputFormatWriter exte
         int id = sp.getId();
         setVarsInConf(id);
         HCatOutputFormat outFormat = new HCatOutputFormat();
-        TaskAttemptContext cntxt = HCatHadoopShims.Instance.get().createTaskAttemptContext
-            (conf, new TaskAttemptID(HCatHadoopShims.Instance.get().createTaskID(), id));
+        TaskAttemptContext cntxt = ShimLoader.getHadoopShims().getHCatShim().createTaskAttemptContext(
+                conf, new TaskAttemptID(ShimLoader.getHadoopShims().getHCatShim().createTaskID(), id));
         OutputCommitter committer = null;
         RecordWriter<WritableComparable<?>, HCatRecord> writer;
         try {
@@ -125,9 +125,9 @@ public class HCatOutputFormatWriter exte
     @Override
     public void commit(WriterContext context) throws HCatException {
         try {
-            new HCatOutputFormat().getOutputCommitter(HCatHadoopShims.Instance.get().createTaskAttemptContext
-                (context.getConf(), HCatHadoopShims.Instance.get().createTaskAttemptID()))
-                .commitJob(HCatHadoopShims.Instance.get().createJobContext(context.getConf(), null));
+            new HCatOutputFormat().getOutputCommitter(ShimLoader.getHadoopShims().getHCatShim().createTaskAttemptContext(
+                    context.getConf(), ShimLoader.getHadoopShims().getHCatShim().createTaskAttemptID()))
+                .commitJob(ShimLoader.getHadoopShims().getHCatShim().createJobContext(context.getConf(), null));
         } catch (IOException e) {
             throw new HCatException(ErrorType.ERROR_NOT_INITIALIZED, e);
         } catch (InterruptedException e) {
@@ -138,9 +138,10 @@ public class HCatOutputFormatWriter exte
     @Override
     public void abort(WriterContext context) throws HCatException {
         try {
-            new HCatOutputFormat().getOutputCommitter(HCatHadoopShims.Instance.get().createTaskAttemptContext
-                (context.getConf(), HCatHadoopShims.Instance.get().createTaskAttemptID()))
-                .abortJob(HCatHadoopShims.Instance.get().createJobContext(context.getConf(), null), State.FAILED);
+            new HCatOutputFormat().getOutputCommitter(ShimLoader.getHadoopShims().getHCatShim().createTaskAttemptContext(
+                context.getConf(), ShimLoader.getHadoopShims().getHCatShim().createTaskAttemptID()))
+                .abortJob(ShimLoader.getHadoopShims().getHCatShim().createJobContext(
+                        context.getConf(), null), State.FAILED);
         } catch (IOException e) {
             throw new HCatException(ErrorType.ERROR_NOT_INITIALIZED, e);
         } catch (InterruptedException e) {

Modified: hive/branches/tez/hcatalog/core/src/main/java/org/apache/hcatalog/mapreduce/FileOutputCommitterContainer.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/core/src/main/java/org/apache/hcatalog/mapreduce/FileOutputCommitterContainer.java?rev=1519056&r1=1519055&r2=1519056&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/core/src/main/java/org/apache/hcatalog/mapreduce/FileOutputCommitterContainer.java (original)
+++ hive/branches/tez/hcatalog/core/src/main/java/org/apache/hcatalog/mapreduce/FileOutputCommitterContainer.java Fri Aug 30 19:46:15 2013
@@ -43,6 +43,7 @@ import org.apache.hadoop.hive.metastore.
 import org.apache.hadoop.hive.metastore.api.Partition;
 import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
 import org.apache.hadoop.hive.ql.metadata.Table;
+import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.mapred.HCatMapRedUtil;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapreduce.JobContext;
@@ -56,7 +57,6 @@ import org.apache.hcatalog.data.schema.H
 import org.apache.hcatalog.data.schema.HCatSchema;
 import org.apache.hcatalog.data.schema.HCatSchemaUtils;
 import org.apache.hcatalog.har.HarOutputCommitterPostProcessor;
-import org.apache.hcatalog.shims.HCatHadoopShims;
 import org.apache.thrift.TException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -304,7 +304,7 @@ class FileOutputCommitterContainer exten
 
         // Apply the group and permissions to the leaf partition and files.
         // Need not bother in case of HDFS as permission is taken care of by setting UMask
-        if (!HCatHadoopShims.Instance.get().isFileInHDFS(fs, partPath)) {
+        if (!ShimLoader.getHadoopShims().getHCatShim().isFileInHDFS(fs, partPath)) {
             applyGroupAndPerms(fs, partPath, perms, grpName, true);
         }
 
@@ -578,7 +578,7 @@ class FileOutputCommitterContainer exten
                         jobConf,
                         context.getJobID(),
                         InternalUtil.createReporter(HCatMapRedUtil.createTaskAttemptContext(jobConf,
-                            HCatHadoopShims.Instance.get().createTaskAttemptID())));
+                            ShimLoader.getHadoopShims().getHCatShim().createTaskAttemptID())));
                     HCatOutputFormat.configureOutputStorageHandler(currContext, jobInfo, fullPartSpec);
                     contextDiscoveredByPath.put(st.getPath().toString(), currContext);
                 }

Modified: hive/branches/tez/hcatalog/core/src/main/java/org/apache/hcatalog/mapreduce/HCatOutputFormat.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/core/src/main/java/org/apache/hcatalog/mapreduce/HCatOutputFormat.java?rev=1519056&r1=1519055&r2=1519056&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/core/src/main/java/org/apache/hcatalog/mapreduce/HCatOutputFormat.java (original)
+++ hive/branches/tez/hcatalog/core/src/main/java/org/apache/hcatalog/mapreduce/HCatOutputFormat.java Fri Aug 30 19:46:15 2013
@@ -159,6 +159,8 @@ public class HCatOutputFormat extends HC
                 outputJobInfo.setPartitionValues(valueMap);
             }
 
+            // To get around hbase failure on single node, see BUG-4383
+            conf.set("dfs.client.read.shortcircuit", "false");
             HCatSchema tableSchema = HCatUtil.extractSchema(table);
             StorerInfo storerInfo =
                 InternalUtil.extractStorerInfo(table.getTTable().getSd(), table.getParameters());

Modified: hive/branches/tez/hcatalog/core/src/main/java/org/apache/hcatalog/mapreduce/MultiOutputFormat.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/core/src/main/java/org/apache/hcatalog/mapreduce/MultiOutputFormat.java?rev=1519056&r1=1519055&r2=1519056&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/core/src/main/java/org/apache/hcatalog/mapreduce/MultiOutputFormat.java (original)
+++ hive/branches/tez/hcatalog/core/src/main/java/org/apache/hcatalog/mapreduce/MultiOutputFormat.java Fri Aug 30 19:46:15 2013
@@ -36,6 +36,8 @@ import org.apache.commons.lang.StringUti
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.shims.HadoopShims;
+import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.mapreduce.Job;
@@ -48,7 +50,6 @@ import org.apache.hadoop.mapreduce.TaskA
 import org.apache.hadoop.mapreduce.TaskInputOutputContext;
 import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.hcatalog.common.HCatUtil;
-import org.apache.hcatalog.shims.HCatHadoopShims;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -146,13 +147,16 @@ public class MultiOutputFormat extends O
 
     static {
         configsToOverride.add("mapred.output.dir");
-        configsToOverride.add(HCatHadoopShims.Instance.get().getPropertyName(HCatHadoopShims.PropertyName.CACHE_SYMLINK));
+        configsToOverride.add(ShimLoader.getHadoopShims().getHCatShim().getPropertyName(
+                HadoopShims.HCatHadoopShims.PropertyName.CACHE_SYMLINK));
         configsToMerge.put(JobContext.JOB_NAMENODES, COMMA_DELIM);
         configsToMerge.put("tmpfiles", COMMA_DELIM);
         configsToMerge.put("tmpjars", COMMA_DELIM);
         configsToMerge.put("tmparchives", COMMA_DELIM);
-        configsToMerge.put(HCatHadoopShims.Instance.get().getPropertyName(HCatHadoopShims.PropertyName.CACHE_ARCHIVES), COMMA_DELIM);
-        configsToMerge.put(HCatHadoopShims.Instance.get().getPropertyName(HCatHadoopShims.PropertyName.CACHE_FILES), COMMA_DELIM);
+        configsToMerge.put(ShimLoader.getHadoopShims().getHCatShim().getPropertyName(
+                HadoopShims.HCatHadoopShims.PropertyName.CACHE_ARCHIVES), COMMA_DELIM);
+        configsToMerge.put(ShimLoader.getHadoopShims().getHCatShim().getPropertyName(
+                HadoopShims.HCatHadoopShims.PropertyName.CACHE_FILES), COMMA_DELIM);
         String fileSep;
         if (HCatUtil.isHadoop23()) {
             fileSep = ",";
@@ -183,7 +187,8 @@ public class MultiOutputFormat extends O
      */
     public static JobContext getJobContext(String alias, JobContext context) {
         String aliasConf = context.getConfiguration().get(getAliasConfName(alias));
-        JobContext aliasContext = HCatHadoopShims.Instance.get().createJobContext(context.getConfiguration(), context.getJobID());
+        JobContext aliasContext = ShimLoader.getHadoopShims().getHCatShim().createJobContext(
+                context.getConfiguration(), context.getJobID());
         addToConfig(aliasConf, aliasContext.getConfiguration());
         return aliasContext;
     }
@@ -197,7 +202,7 @@ public class MultiOutputFormat extends O
      */
     public static TaskAttemptContext getTaskAttemptContext(String alias, TaskAttemptContext context) {
         String aliasConf = context.getConfiguration().get(getAliasConfName(alias));
-        TaskAttemptContext aliasContext = HCatHadoopShims.Instance.get().createTaskAttemptContext(
+        TaskAttemptContext aliasContext = ShimLoader.getHadoopShims().getHCatShim().createTaskAttemptContext(
                 context.getConfiguration(), context.getTaskAttemptID());
         addToConfig(aliasConf, aliasContext.getConfiguration());
         return aliasContext;

Modified: hive/branches/tez/hcatalog/core/src/main/java/org/apache/hcatalog/mapreduce/Security.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/core/src/main/java/org/apache/hcatalog/mapreduce/Security.java?rev=1519056&r1=1519055&r2=1519056&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/core/src/main/java/org/apache/hcatalog/mapreduce/Security.java (original)
+++ hive/branches/tez/hcatalog/core/src/main/java/org/apache/hcatalog/mapreduce/Security.java Fri Aug 30 19:46:15 2013
@@ -28,6 +28,7 @@ import java.util.Map.Entry;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
 import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.hive.thrift.DelegationTokenSelector;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapreduce.Job;
@@ -39,7 +40,6 @@ import org.apache.hadoop.security.token.
 import org.apache.hadoop.security.token.TokenSelector;
 import org.apache.hcatalog.common.HCatConstants;
 import org.apache.hcatalog.common.HCatUtil;
-import org.apache.hcatalog.shims.HCatHadoopShims;
 import org.apache.thrift.TException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -142,7 +142,7 @@ final class Security {
                     TokenSelector<? extends TokenIdentifier> jtTokenSelector =
                         new org.apache.hadoop.mapreduce.security.token.delegation.DelegationTokenSelector();
                     Token jtToken = jtTokenSelector.selectToken(org.apache.hadoop.security.SecurityUtil.buildTokenService(
-                        HCatHadoopShims.Instance.get().getResourceManagerAddress(conf)), ugi.getTokens());
+                        ShimLoader.getHadoopShims().getHCatShim().getResourceManagerAddress(conf)), ugi.getTokens());
                     if (jtToken == null) {
                         //we don't need to cancel this token as the TokenRenewer for JT tokens
                         //takes care of cancelling them

Modified: hive/branches/tez/hcatalog/core/src/test/java/org/apache/hcatalog/rcfile/TestRCFileMapReduceInputFormat.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/core/src/test/java/org/apache/hcatalog/rcfile/TestRCFileMapReduceInputFormat.java?rev=1519056&r1=1519055&r2=1519056&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/core/src/test/java/org/apache/hcatalog/rcfile/TestRCFileMapReduceInputFormat.java (original)
+++ hive/branches/tez/hcatalog/core/src/test/java/org/apache/hcatalog/rcfile/TestRCFileMapReduceInputFormat.java Fri Aug 30 19:46:15 2013
@@ -35,6 +35,7 @@ import org.apache.hadoop.hive.serde.serd
 import org.apache.hadoop.hive.serde2.columnar.BytesRefArrayWritable;
 import org.apache.hadoop.hive.serde2.columnar.BytesRefWritable;
 import org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe;
+import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.compress.CompressionCodec;
 import org.apache.hadoop.mapreduce.InputSplit;
@@ -43,7 +44,6 @@ import org.apache.hadoop.mapreduce.JobCo
 import org.apache.hadoop.mapreduce.RecordReader;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
 import org.apache.hadoop.mapreduce.TaskAttemptID;
-import org.apache.hcatalog.shims.HCatHadoopShims;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -233,7 +233,8 @@ public class TestRCFileMapReduceInputFor
         assertEquals("splits length should be " + splitNumber, splits.size(), splitNumber);
         int readCount = 0;
         for (int i = 0; i < splits.size(); i++) {
-            TaskAttemptContext tac = HCatHadoopShims.Instance.get().createTaskAttemptContext(jonconf, new TaskAttemptID());
+            TaskAttemptContext tac = ShimLoader.getHadoopShims().getHCatShim().createTaskAttemptContext(jonconf,
+                    new TaskAttemptID());
             RecordReader<LongWritable, BytesRefArrayWritable> rr = inputFormat.createRecordReader(splits.get(i), tac);
             rr.initialize(splits.get(i), tac);
             while (rr.nextKeyValue()) {

Modified: hive/branches/tez/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hcatalog/pig/HCatStorer.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hcatalog/pig/HCatStorer.java?rev=1519056&r1=1519055&r2=1519056&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hcatalog/pig/HCatStorer.java (original)
+++ hive/branches/tez/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hcatalog/pig/HCatStorer.java Fri Aug 30 19:46:15 2013
@@ -27,6 +27,7 @@ import java.util.Map.Entry;
 import java.util.Properties;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.OutputFormat;
 import org.apache.hadoop.security.Credentials;
@@ -36,7 +37,6 @@ import org.apache.hcatalog.common.HCatEx
 import org.apache.hcatalog.data.schema.HCatSchema;
 import org.apache.hcatalog.mapreduce.HCatOutputFormat;
 import org.apache.hcatalog.mapreduce.OutputJobInfo;
-import org.apache.hcatalog.shims.HCatHadoopShims;
 import org.apache.pig.PigException;
 import org.apache.pig.ResourceSchema;
 import org.apache.pig.impl.logicalLayer.FrontendException;
@@ -157,11 +157,11 @@ public class HCatStorer extends HCatBase
 
     @Override
     public void storeSchema(ResourceSchema schema, String arg1, Job job) throws IOException {
-        HCatHadoopShims.Instance.get().commitJob(getOutputFormat(), job);
+        ShimLoader.getHadoopShims().getHCatShim().commitJob(getOutputFormat(), job);
     }
 
     @Override
     public void cleanupOnFailure(String location, Job job) throws IOException {
-        HCatHadoopShims.Instance.get().abortJob(getOutputFormat(), job);
+        ShimLoader.getHadoopShims().getHCatShim().abortJob(getOutputFormat(), job);
     }
 }

Modified: hive/branches/tez/hcatalog/src/docs/src/documentation/content/xdocs/configuration.xml
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/src/docs/src/documentation/content/xdocs/configuration.xml?rev=1519056&r1=1519055&r2=1519056&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/src/docs/src/documentation/content/xdocs/configuration.xml (original)
+++ hive/branches/tez/hcatalog/src/docs/src/documentation/content/xdocs/configuration.xml Fri Aug 30 19:46:15 2013
@@ -286,6 +286,46 @@ Using localhost in metastore uri does no
         principal.</td>
   </tr>
 
+   <tr>
+     <td><strong>webhcat.proxyuser.#USER#.hosts</strong></td>
+     <td>None</td>
+     <td>List of client hosts from which the '#USER#' user is allowed to perform
+         'doAs' operations.
+
+         The '#USER#' must be replaced with the username of the user who is
+         allowed to perform 'doAs' operations.
+
+         The value can be the '*' wildcard, which means every host is allowed,
+         or a comma-separated list of hostnames.
+
+         If value is a blank string or webhcat.proxyuser.#USER#.hosts is missing,
+         no hosts will be allowed.
+
+         For multiple users copy this property and replace the user name
+         in the property name.</td>
+   </tr>
+   <tr>
+     <td><strong>webhcat.proxyuser.#USER#.groups</strong></td>
+     <td>None</td>
+     <td>List of groups the '#USER#' user is allowed to impersonate users
+         from to perform 'doAs' operations.
+
+         The '#USER#' must be replaced with the username of the user who is
+         allowed to perform 'doAs' operations.
+
+         The value can be the '*' wildcard, which means any doAs value is
+         allowed, or a comma-separated list of groups.
+
+         If value is an empty list or webhcat.proxyuser.#USER#.groups is missing,
+         every doAs call value will fail.
+
+         For multiple users copy this property and replace the user name
+         in the property name.
+
+         The username->usergroup mapping is performed using Hadoop API which is
+         controlled by hadoop.security.group.mapping property.</td>
+    </tr>
+
   </table>
   </section>
 

Modified: hive/branches/tez/hcatalog/src/test/e2e/templeton/README.txt
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/src/test/e2e/templeton/README.txt?rev=1519056&r1=1519055&r2=1519056&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/src/test/e2e/templeton/README.txt (original)
+++ hive/branches/tez/hcatalog/src/test/e2e/templeton/README.txt Fri Aug 30 19:46:15 2013
@@ -123,6 +123,29 @@ ant test-hcat-authorization -Dkeytab.dir
 
 The <keytab files dir> is expected to have keytab filenames of the form - user_name.*keytab .
 
+Running WebHCat doas tests
+--------------------------
+ant clean test-doas -Dinpdir.hdfs=/user/ekoifman/webhcate2e -Dsecure.mode=no   
+    -Dharness.webhdfs.url=http://localhost:8085  -Dharness.templeton.url=http://localhost:50111 
+    -Dtests.to.run='-t doAsTests' -Dtest.user.name=hue -Ddoas.user=joe
+    
+The canonical example: the WebHCat server runs as user 'hcat', and end user 'joe' is using Hue,
+which generates a request to WebHCat.  If Hue specifies doAs=joe, then the commands that WebHCat
+submits to Hadoop will be run as user 'joe'.
+
+In order for this test suite to work, webhcat-site.xml should have webhcat.proxyuser.hue.groups
+and webhcat.proxyuser.hue.hosts defined, i.e. 'hue' should be allowed to impersonate 'joe'.
+[Of course, 'hcat' proxyuser should be configured in core-site.xml for the command to succeed.]
+
+Furthermore, metastore-side file-based security should be enabled.  To do this, the following
+properties in hive-site.xml should be configured:
+1) hive.security.metastore.authorization.manager set to 
+    org.apache.hadoop.hive.ql.security.authorization.StorageBasedAuthorizationProvider
+2) hive.security.metastore.authenticator.manager set to 
+    org.apache.hadoop.hive.ql.security.HadoopDefaultMetastoreAuthenticator
+3) hive.metastore.pre.event.listeners set to
+    org.apache.hadoop.hive.ql.security.authorization.AuthorizationPreEventListener
+4) hive.metastore.execute.setugi set to true
 
 Notes
 -----
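
For the 'hue'/'joe' scenario described above, a hedged example of what the two webhcat-site.xml entries might look like (the '*' wildcard means any host / any group, per the description in this commit; a comma-separated list can be used instead; property names follow the template added to webhcat-default.xml):

    <property>
      <name>webhcat.proxyuser.hue.hosts</name>
      <value>*</value>
    </property>
    <property>
      <name>webhcat.proxyuser.hue.groups</name>
      <value>*</value>
    </property>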

Modified: hive/branches/tez/hcatalog/src/test/e2e/templeton/build.xml
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/src/test/e2e/templeton/build.xml?rev=1519056&r1=1519055&r2=1519056&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/src/test/e2e/templeton/build.xml (original)
+++ hive/branches/tez/hcatalog/src/test/e2e/templeton/build.xml Fri Aug 30 19:46:15 2013
@@ -83,11 +83,15 @@
 
     <target name="test" depends="property-check, init-test">
         <!-- fork (parallelization) factors for e2e tests execution.
-             Defaults are 1, which means *no* parellelization: -->
-        <property name="fork.factor.group" value="1"/>
-        <property name="fork.factor.conf.file" value="1"/>
+             Defaults are 1, which means *no* parallelization:
+             if group=3, then 3 .conf files will be processed in parallel
+             if conf.file=2 there will be 2 threads per .conf file, each thread 
+             executing a single group (identified by 'name' element) -->
+        <property name="fork.factor.group" value="5"/>
+        <property name="fork.factor.conf.file" value="5"/>
         <property name="e2e.debug" value="false"/>
         <property name="tests.to.run" value=""/>
+
         <exec executable="./test_harness.pl" dir="${test.location}" failonerror="true">
             <env key="HARNESS_ROOT" value="."/>
             <env key="TH_WORKING_DIR" value="${test.location}"/>
@@ -141,6 +145,42 @@
             <arg value="${basedir}/tests/hcatperms.conf"/>
         </exec>
     </target>
+    <target name="test-doas" depends="property-check, init-test" description="See README.txt for instructions">
+        <!-- fork (parallelization) factors for e2e tests execution.
+             Defaults are 1, which means *no* parallelization:
+             if group=3, then 3 .conf files will be processed in parallel
+             if conf.file=2 there will be 2 threads per .conf file, each thread 
+             executing a single group (identified by 'name' element) -->
+        <property name="fork.factor.group" value="1"/>
+        <property name="fork.factor.conf.file" value="1"/>
+        <property name="e2e.debug" value="false"/>
+        <property name="tests.to.run" value=""/>
+        <property name="doas.user.tmp" value="${doas.user}" />
+        <condition property="doas.user" value="${test.user.name}">
+            <!--default doas.user (if not set) to test.user.name-->
+            <isset property="doas.user.tmp"/>
+        </condition>
+
+        <exec executable="./test_harness.pl" dir="${test.location}" failonerror="true">
+            <env key="HARNESS_ROOT" value="."/>
+            <env key="TH_WORKING_DIR" value="${test.location}"/>
+            <env key="TH_INPDIR_LOCAL" value="${inpdir.local}"/>
+            <env key="TH_INPDIR_HDFS" value="${inpdir.hdfs}"/>
+            <env key="TH_OUT" value="."/>
+            <env key="TH_ROOT" value="."/>
+            <env key="FORK_FACTOR_GROUP" value="${fork.factor.group}"/>
+            <env key="FORK_FACTOR_FILE" value="${fork.factor.conf.file}"/>
+            <env key="E2E_DEBUG" value="${e2e.debug}"/>
+            <env key="WEBHDFS_URL" value="${harness.webhdfs.url}"/>
+            <env key="TEMPLETON_URL" value="${harness.templeton.url}"/>
+            <env key="USER_NAME" value="${test.user.name}"/>
+            <env key="DOAS_USER" value="${doas.user}"/>
+            <env key="HARNESS_CONF" value="${basedir}/conf/default.conf"/>
+            <env key="SECURE_MODE" value="${secure.mode}"/>
+            <arg line="${tests.to.run}"/>
+            <arg value="${basedir}/tests/doas.conf"/>
+        </exec>
+    </target>
 
     <target name="clean">
         <delete dir="${test.location}"/>
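
As a usage note for the parallelization knobs above: because ant command-line properties take precedence over <property> defaults, both fork factors can be overridden per run. An illustrative invocation (the -D values are placeholders in the same style as the README commands; the exact required properties depend on the harness setup):

    ant test -Dfork.factor.group=3 -Dfork.factor.conf.file=2
        -Dinpdir.hdfs=/user/me/webhcate2e -Dsecure.mode=no
        -Dharness.webhdfs.url=http://localhost:8085
        -Dharness.templeton.url=http://localhost:50111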

Modified: hive/branches/tez/hcatalog/src/test/e2e/templeton/drivers/TestDriverCurl.pm
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/src/test/e2e/templeton/drivers/TestDriverCurl.pm?rev=1519056&r1=1519055&r2=1519056&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/src/test/e2e/templeton/drivers/TestDriverCurl.pm (original)
+++ hive/branches/tez/hcatalog/src/test/e2e/templeton/drivers/TestDriverCurl.pm Fri Aug 30 19:46:15 2013
@@ -174,6 +174,7 @@ sub globalSetup
     $globalHash->{'webhdfs_url'} = $ENV{'WEBHDFS_URL'};
     $globalHash->{'templeton_url'} = $ENV{'TEMPLETON_URL'};
     $globalHash->{'current_user'} = $ENV{'USER_NAME'};
+    $globalHash->{'DOAS_USER'} = $ENV{'DOAS_USER'};
     $globalHash->{'current_group_user'} = $ENV{'GROUP_USER_NAME'};
     $globalHash->{'current_other_user'} = $ENV{'OTHER_USER_NAME'};
     $globalHash->{'current_group'} = $ENV{'GROUP_NAME'};
@@ -317,6 +318,7 @@ sub replaceParametersInArg
     }
     my $outdir = $testCmd->{'outpath'} . $testCmd->{'group'} . "_" . $testCmd->{'num'};
     $arg =~ s/:UNAME:/$testCmd->{'current_user'}/g;
+    $arg =~ s/:DOAS:/$testCmd->{'DOAS_USER'}/g;
     $arg =~ s/:UNAME_GROUP:/$testCmd->{'current_group_user'}/g;
     $arg =~ s/:UNAME_OTHER:/$testCmd->{'current_other_user'}/g;
     $arg =~ s/:UGROUP:/$testCmd->{'current_group'}/g;

Modified: hive/branches/tez/hcatalog/webhcat/svr/pom.xml
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/webhcat/svr/pom.xml?rev=1519056&r1=1519055&r2=1519056&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/webhcat/svr/pom.xml (original)
+++ hive/branches/tez/hcatalog/webhcat/svr/pom.xml Fri Aug 30 19:46:15 2013
@@ -36,7 +36,18 @@
     <url>http://maven.apache.org</url>
 
     <dependencies>
-
+        <!--
+        <dependency>
+            <groupId>xerces</groupId>
+            <artifactId>xercesImpl</artifactId>
+            <version>2.9.1</version>
+        </dependency>
+        <dependency>
+            <groupId>xalan</groupId>
+            <artifactId>xalan</artifactId>
+            <version>2.7.1</version>
+        </dependency>
+        -->
         <!-- provided scope - made available as separate package
           not packaged or added as dependency
         -->
@@ -74,12 +85,6 @@
             <scope>compile</scope>
         </dependency>
         <dependency>
-            <groupId>com.sun.jersey</groupId>
-            <artifactId>jersey-json</artifactId>
-            <version>${jersey.version}</version>
-            <scope>compile</scope>
-        </dependency>
-        <dependency>
             <groupId>org.codehaus.jackson</groupId>
             <artifactId>jackson-core-asl</artifactId>
             <version>${jackson.version}</version>

Modified: hive/branches/tez/hcatalog/webhcat/svr/src/main/config/webhcat-default.xml
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/webhcat/svr/src/main/config/webhcat-default.xml?rev=1519056&r1=1519055&r2=1519056&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/webhcat/svr/src/main/config/webhcat-default.xml (original)
+++ hive/branches/tez/hcatalog/webhcat/svr/src/main/config/webhcat-default.xml Fri Aug 30 19:46:15 2013
@@ -234,5 +234,49 @@
     in the cluster are taken over by Templeton launcher tasks.
     </description>
   </property>
+  <!--
+  <property>
+    <name>webhcat.proxyuser.#USER#.hosts</name>
+    <value>www.example.com,host2</value>
+    <description>
+      List of client hosts from which the '#USER#' user is allowed to perform 
+      'doAs' operations.
 
+      The '#USER#' must be replaced with the username of the user who is
+      allowed to perform 'doAs' operations.
+
+      The value can be the '*' wildcard, which means every host is allowed,
+      or a comma-separated list of hostnames.
+   
+      If value is a blank string or webhcat.proxyuser.#USER#.hosts is missing,
+      no hosts will be allowed.  
+
+      For multiple users copy this property and replace the user name
+      in the property name.
+    </description>
+  </property>
+  <property>
+    <name>webhcat.proxyuser.#USER#.groups</name>
+    <value>group1, group2</value>
+    <description>
+      List of groups the '#USER#' user is allowed to impersonate users
+      from to perform 'doAs' operations.
+
+      The '#USER#' must be replaced with the username of the user who is
+      allowed to perform 'doAs' operations.
+
+      The value can be the '*' wildcard, which means any doAs value is
+      allowed, or a comma-separated list of groups.
+
+      If value is an empty list or webhcat.proxyuser.#USER#.groups is missing,
+      every doAs call value will fail.
+
+      For multiple users copy this property and replace the user name
+      in the property name.
+      
+      The username->usergroup mapping is performed using Hadoop API which is 
+      controlled by hadoop.security.group.mapping property.
+    </description>
+  </property>
+-->
 </configuration>

Modified: hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/AppConfig.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/AppConfig.java?rev=1519056&r1=1519055&r2=1519056&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/AppConfig.java (original)
+++ hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/AppConfig.java Fri Aug 30 19:46:15 2013
@@ -138,6 +138,7 @@ public class AppConfig extends Configura
         String hadoopConfDir = getHadoopConfDir();
         for (String fname : HADOOP_CONF_FILENAMES)
             loadOneFileConfig(hadoopConfDir, fname);
+        ProxyUserSupport.processProxyuserConfig(this);
     }
 
     public void startCleanup() {

Modified: hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/DeleteDelegator.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/DeleteDelegator.java?rev=1519056&r1=1519055&r2=1519056&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/DeleteDelegator.java (original)
+++ hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/DeleteDelegator.java Fri Aug 30 19:46:15 2013
@@ -19,8 +19,10 @@
 package org.apache.hcatalog.templeton;
 
 import java.io.IOException;
+
+import org.apache.hadoop.hive.shims.HadoopShims.WebHCatJTShim;
+import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.mapred.JobID;
-import org.apache.hadoop.mapred.TempletonJobTracker;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hcatalog.templeton.tool.JobState;
 
@@ -36,10 +38,10 @@ public class DeleteDelegator extends Tem
         throws NotAuthorizedException, BadParam, IOException, InterruptedException
     {
         UserGroupInformation ugi = UserGroupInformation.createRemoteUser(user);
-        TempletonJobTracker tracker = null;
+        WebHCatJTShim tracker = null;
         JobState state = null;
         try {
-            tracker = new TempletonJobTracker(appConf);
+            tracker = ShimLoader.getHadoopShims().getWebHCatShim(appConf);
             JobID jobid = StatusDelegator.StringToJobID(id);
             if (jobid == null)
                 throw new BadParam("Invalid jobid: " + id);
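
The change above (repeated in ListDelegator and StatusDelegator below) replaces direct
construction of the removed TempletonJobTracker with a lookup through the Hive shim
layer, so the Hadoop-version-specific implementation is chosen at runtime.  A minimal
sketch of that lookup, assuming the shim factory accepts a plain Hadoop Configuration
(AppConfig extends Configuration, so the appConf instance can be passed directly):

    import java.io.IOException;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hive.shims.HadoopShims.WebHCatJTShim;
    import org.apache.hadoop.hive.shims.ShimLoader;

    /** Minimal sketch: obtain a version-specific job-tracker handle via the shim layer. */
    class WebHCatShimLookupSketch {
        static WebHCatJTShim openTracker(Configuration conf) throws IOException {
            // ShimLoader selects the Hadoop-version-specific shim at runtime,
            // replacing the old 'new TempletonJobTracker(conf)' construction.
            return ShimLoader.getHadoopShims().getWebHCatShim(conf);
        }
    }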

Modified: hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/LauncherDelegator.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/LauncherDelegator.java?rev=1519056&r1=1519055&r2=1519056&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/LauncherDelegator.java (original)
+++ hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/LauncherDelegator.java Fri Aug 30 19:46:15 2013
@@ -41,8 +41,7 @@ import org.apache.hcatalog.templeton.too
  * launch child jobs.
  */
 public class LauncherDelegator extends TempletonDelegator {
-    private static final Log LOG = LogFactory.getLog(Server.class);
-    public static final String JAR_CLASS = TempletonControllerJob.class.getName();
+    private static final Log LOG = LogFactory.getLog(LauncherDelegator.class);
     protected String runAs = null;
 
     public LauncherDelegator(AppConfig appConf) {

Modified: hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/ListDelegator.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/ListDelegator.java?rev=1519056&r1=1519055&r2=1519056&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/ListDelegator.java (original)
+++ hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/ListDelegator.java Fri Aug 30 19:46:15 2013
@@ -22,8 +22,9 @@ import java.io.IOException;
 import java.util.List;
 import java.util.ArrayList;
 
+import org.apache.hadoop.hive.shims.HadoopShims.WebHCatJTShim;
+import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.mapred.JobStatus;
-import org.apache.hadoop.mapred.TempletonJobTracker;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hcatalog.templeton.tool.JobState;
 
@@ -39,9 +40,9 @@ public class ListDelegator extends Templ
         throws NotAuthorizedException, BadParam, IOException, InterruptedException {
         
         UserGroupInformation ugi = UserGroupInformation.createRemoteUser(user);
-        TempletonJobTracker tracker = null;
+        WebHCatJTShim tracker = null;
         try {
-            tracker = new TempletonJobTracker(appConf);
+            tracker = ShimLoader.getHadoopShims().getWebHCatShim(appConf);
 
             ArrayList<String> ids = new ArrayList<String>();
 

Modified: hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/Server.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/Server.java?rev=1519056&r1=1519055&r2=1519056&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/Server.java (original)
+++ hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/Server.java Fri Aug 30 19:46:15 2013
@@ -19,6 +19,9 @@
 package org.apache.hcatalog.templeton;
 
 import java.io.IOException;
+import java.net.InetAddress;
+import java.net.UnknownHostException;
+import java.text.MessageFormat;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.HashMap;
@@ -26,6 +29,7 @@ import java.util.List;
 import java.util.Map;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
+import javax.servlet.http.HttpServletRequest;
 import javax.ws.rs.DELETE;
 import javax.ws.rs.FormParam;
 import javax.ws.rs.GET;
@@ -54,6 +58,7 @@ import org.apache.hcatalog.templeton.too
 @Path("/v1")
 public class Server {
     public static final String VERSION = "v1";
+    public static final String DO_AS_PARAM = "doAs";
 
     /**
      * The status message.  Always "ok"
@@ -113,6 +118,8 @@ public class Server {
     private
     @Context
     UriInfo theUriInfo;
+    private @QueryParam(DO_AS_PARAM) String doAs;
+    private @Context HttpServletRequest request;
 
     private static final Log LOG = LogFactory.getLog(Server.class);
 
@@ -161,7 +168,7 @@ public class Server {
         verifyParam(exec, "exec");
 
         HcatDelegator d = new HcatDelegator(appConf, execService);
-        return d.run(getUser(), exec, false, group, permissions);
+        return d.run(getDoAsUser(), exec, false, group, permissions);
     }
 
     /**
@@ -180,7 +187,7 @@ public class Server {
         HcatDelegator d = new HcatDelegator(appConf, execService);
         if (!TempletonUtils.isset(tablePattern))
             tablePattern = "*";
-        return d.listTables(getUser(), db, tablePattern);
+        return d.listTables(getDoAsUser(), db, tablePattern);
     }
 
     /**
@@ -200,7 +207,7 @@ public class Server {
         desc.table = table;
 
         HcatDelegator d = new HcatDelegator(appConf, execService);
-        return d.createTable(getUser(), db, desc);
+        return d.createTable(getDoAsUser(), db, desc);
     }
 
     /**
@@ -223,7 +230,7 @@ public class Server {
         desc.newTable = newTable;
 
         HcatDelegator d = new HcatDelegator(appConf, execService);
-        return d.createTableLike(getUser(), db, desc);
+        return d.createTableLike(getDoAsUser(), db, desc);
     }
 
     /**
@@ -245,9 +252,9 @@ public class Server {
 
         HcatDelegator d = new HcatDelegator(appConf, execService);
         if ("extended".equals(format))
-            return d.descExtendedTable(getUser(), db, table);
+            return d.descExtendedTable(getDoAsUser(), db, table);
         else
-            return d.descTable(getUser(), db, table, false);
+            return d.descTable(getDoAsUser(), db, table, false);
     }
 
     /**
@@ -268,7 +275,7 @@ public class Server {
         verifyDdlParam(table, ":table");
 
         HcatDelegator d = new HcatDelegator(appConf, execService);
-        return d.dropTable(getUser(), db, table, ifExists, group, permissions);
+        return d.dropTable(getDoAsUser(), db, table, ifExists, group, permissions);
     }
 
     /**
@@ -290,7 +297,7 @@ public class Server {
         verifyDdlParam(newTable, "rename");
 
         HcatDelegator d = new HcatDelegator(appConf, execService);
-        return d.renameTable(getUser(), db, oldTable, newTable, group, permissions);
+        return d.renameTable(getDoAsUser(), db, oldTable, newTable, group, permissions);
     }
 
     /**
@@ -310,7 +317,7 @@ public class Server {
         verifyDdlParam(property, ":property");
 
         HcatDelegator d = new HcatDelegator(appConf, execService);
-        return d.descTableProperty(getUser(), db, table, property);
+        return d.descTableProperty(getDoAsUser(), db, table, property);
     }
 
     /**
@@ -328,7 +335,7 @@ public class Server {
         verifyDdlParam(table, ":table");
 
         HcatDelegator d = new HcatDelegator(appConf, execService);
-        return d.listTableProperties(getUser(), db, table);
+        return d.listTableProperties(getDoAsUser(), db, table);
     }
 
     /**
@@ -350,7 +357,7 @@ public class Server {
         desc.name = property;
 
         HcatDelegator d = new HcatDelegator(appConf, execService);
-        return d.addOneTableProperty(getUser(), db, table, desc);
+        return d.addOneTableProperty(getDoAsUser(), db, table, desc);
     }
 
     /**
@@ -368,7 +375,7 @@ public class Server {
         verifyDdlParam(table, ":table");
 
         HcatDelegator d = new HcatDelegator(appConf, execService);
-        return d.listPartitions(getUser(), db, table);
+        return d.listPartitions(getDoAsUser(), db, table);
     }
 
     /**
@@ -388,7 +395,7 @@ public class Server {
         verifyParam(partition, ":partition");
 
         HcatDelegator d = new HcatDelegator(appConf, execService);
-        return d.descOnePartition(getUser(), db, table, partition);
+        return d.descOnePartition(getDoAsUser(), db, table, partition);
     }
 
     /**
@@ -409,7 +416,7 @@ public class Server {
         verifyParam(partition, ":partition");
         desc.partition = partition;
         HcatDelegator d = new HcatDelegator(appConf, execService);
-        return d.addOnePartition(getUser(), db, table, desc);
+        return d.addOnePartition(getDoAsUser(), db, table, desc);
     }
 
     /**
@@ -431,8 +438,8 @@ public class Server {
         verifyDdlParam(table, ":table");
         verifyParam(partition, ":partition");
         HcatDelegator d = new HcatDelegator(appConf, execService);
-        return d.dropPartition(getUser(), db, table, partition, ifExists,
-            group, permissions);
+        return d.dropPartition(getDoAsUser(), db, table, partition, ifExists,
+                group, permissions);
     }
 
     /**
@@ -449,7 +456,7 @@ public class Server {
         HcatDelegator d = new HcatDelegator(appConf, execService);
         if (!TempletonUtils.isset(dbPattern))
             dbPattern = "*";
-        return d.listDatabases(getUser(), dbPattern);
+        return d.listDatabases(getDoAsUser(), dbPattern);
     }
 
     /**
@@ -465,7 +472,7 @@ public class Server {
         verifyUser();
         verifyDdlParam(db, ":db");
         HcatDelegator d = new HcatDelegator(appConf, execService);
-        return d.descDatabase(getUser(), db, "extended".equals(format));
+        return d.descDatabase(getDoAsUser(), db, "extended".equals(format));
     }
 
     /**
@@ -482,7 +489,7 @@ public class Server {
         verifyDdlParam(db, ":db");
         desc.database = db;
         HcatDelegator d = new HcatDelegator(appConf, execService);
-        return d.createDatabase(getUser(), desc);
+        return d.createDatabase(getDoAsUser(), desc);
     }
 
     /**
@@ -503,8 +510,8 @@ public class Server {
         if (TempletonUtils.isset(option))
             verifyDdlParam(option, "option");
         HcatDelegator d = new HcatDelegator(appConf, execService);
-        return d.dropDatabase(getUser(), db, ifExists, option,
-            group, permissions);
+        return d.dropDatabase(getDoAsUser(), db, ifExists, option,
+                group, permissions);
     }
 
     /**
@@ -523,7 +530,7 @@ public class Server {
         verifyDdlParam(table, ":table");
 
         HcatDelegator d = new HcatDelegator(appConf, execService);
-        return d.listColumns(getUser(), db, table);
+        return d.listColumns(getDoAsUser(), db, table);
     }
 
     /**
@@ -543,7 +550,7 @@ public class Server {
         verifyParam(column, ":column");
 
         HcatDelegator d = new HcatDelegator(appConf, execService);
-        return d.descOneColumn(getUser(), db, table, column);
+        return d.descOneColumn(getDoAsUser(), db, table, column);
     }
 
     /**
@@ -566,7 +573,7 @@ public class Server {
         desc.name = column;
 
         HcatDelegator d = new HcatDelegator(appConf, execService);
-        return d.addOneColumn(getUser(), db, table, desc);
+        return d.addOneColumn(getDoAsUser(), db, table, desc);
     }
 
     /**
@@ -593,7 +600,7 @@ public class Server {
         verifyParam(reducer, "reducer");
 
         StreamingDelegator d = new StreamingDelegator(appConf);
-        return d.run(getUser(), inputs, output, mapper, reducer,
+        return d.run(getDoAsUser(), inputs, output, mapper, reducer,
             files, defines, cmdenvs, args,
             statusdir, callback, getCompletedUrl());
     }
@@ -619,7 +626,7 @@ public class Server {
         verifyParam(mainClass, "class");
 
         JarDelegator d = new JarDelegator(appConf);
-        return d.run(getUser(),
+        return d.run(getDoAsUser(),
             jar, mainClass,
             libjars, files, args, defines,
             statusdir, callback, getCompletedUrl());
@@ -644,7 +651,7 @@ public class Server {
             throw new BadParam("Either execute or file parameter required");
 
         PigDelegator d = new PigDelegator(appConf);
-        return d.run(getUser(),
+        return d.run(getDoAsUser(),
             execute, srcFile,
             pigArgs, otherFiles,
             statusdir, callback, getCompletedUrl());
@@ -668,7 +675,7 @@ public class Server {
             throw new BadParam("Either execute or file parameter required");
 
         HiveDelegator d = new HiveDelegator(appConf);
-        return d.run(getUser(), execute, srcFile, defines,
+        return d.run(getDoAsUser(), execute, srcFile, defines,
             statusdir, callback, getCompletedUrl());
     }
 
@@ -685,7 +692,7 @@ public class Server {
         verifyParam(jobid, ":jobid");
 
         StatusDelegator d = new StatusDelegator(appConf);
-        return d.run(getUser(), jobid);
+        return d.run(getDoAsUser(), jobid);
     }
 
     /**
@@ -701,7 +708,7 @@ public class Server {
         verifyParam(jobid, ":jobid");
 
         DeleteDelegator d = new DeleteDelegator(appConf);
-        return d.run(getUser(), jobid);
+        return d.run(getDoAsUser(), jobid);
     }
 
     /**
@@ -716,7 +723,7 @@ public class Server {
         verifyUser();
 
         ListDelegator d = new ListDelegator(appConf);
-        return d.run(getUser());
+        return d.run(getDoAsUser());
     }
 
     /**
@@ -734,16 +741,30 @@ public class Server {
     /**
      * Verify that we have a valid user.  Throw an exception if invalid.
      */
-    public void verifyUser()
-        throws NotAuthorizedException {
-        if (getUser() == null) {
+    public void verifyUser() throws NotAuthorizedException {
+        String requestingUser = getRequestingUser();
+        if (requestingUser == null) {
             String msg = "No user found.";
             if (!UserGroupInformation.isSecurityEnabled())
                 msg += "  Missing " + PseudoAuthenticator.USER_NAME + " parameter.";
             throw new NotAuthorizedException(msg);
         }
+        if(doAs != null && !doAs.equals(requestingUser)) {
+            /* if the doAs user is different from the logged-in user, check that
+            the logged-in user is authorized to run as 'doAs' */
+            ProxyUserSupport.validate(requestingUser, getRequestingHost(requestingUser, request), doAs);
+        }
+    }
+    /**
+     * All 'tasks' spawned by WebHCat should be run as this user.  Without the doAs query
+     * parameter this is just the user making the request (or the
+     * {@link org.apache.hadoop.security.authentication.client.PseudoAuthenticator#USER_NAME}
+     * query param).
+     * @return value of doAs query parameter or {@link #getRequestingUser()}
+     */
+    private String getDoAsUser() {
+        return doAs != null && !doAs.equals(getRequestingUser()) ? doAs : getRequestingUser();
     }
-
     /**
      * Verify that the parameter exists.  Throw an exception if invalid.
      */
@@ -777,16 +798,20 @@ public class Server {
         if (!m.matches())
             throw new BadParam("Invalid DDL identifier " + name);
     }
-
     /**
-     * Get the user name from the security context.
+     * Get the user name from the security context, i.e. the user making the HTTP request.
+     * With simple/pseudo security mode this should return the
+     * value of the user.name query param; in Kerberos mode it's the kinit'ed user.
      */
-    public String getUser() {
+    private String getRequestingUser() {
         if (theSecurityContext == null)
             return null;
         if (theSecurityContext.getUserPrincipal() == null)
             return null;
-        return theSecurityContext.getUserPrincipal().getName();
+        // map hue/foo.bar@something.com -> hue, since user/group checks
+        // and config files are in terms of the short name
+        return UserGroupInformation.createRemoteUser(
+                theSecurityContext.getUserPrincipal().getName()).getShortUserName();
     }
 
     /**
@@ -800,4 +825,32 @@ public class Server {
         return theUriInfo.getBaseUri() + VERSION
             + "/internal/complete/$jobId";
     }
+    /**
+     * Returns the canonical host name from which the request was made; used for doAs validation.
+     */
+    private static String getRequestingHost(String requestingUser, HttpServletRequest request) {
+        final String unkHost = "???";
+        if(request == null) {
+            LOG.warn("request is null; cannot determine hostname");
+            return unkHost;
+        }
+        try {
+            String address = request.getRemoteAddr();//returns IP addr
+            if(address == null) {
+                LOG.warn(MessageFormat.format("Request remote address is NULL for user [{0}]", requestingUser));
+                return unkHost;
+            }
+
+            //Inet4Address/Inet6Address
+            String hostName = InetAddress.getByName(address).getCanonicalHostName();
+            if(LOG.isDebugEnabled()) {
+                LOG.debug(MessageFormat.format("Resolved remote hostname: [{0}]", hostName));
+            }
+            return hostName;
+            
+        } catch (UnknownHostException ex) {
+            LOG.warn(MessageFormat.format("Request remote address could not be resolved, {0}", ex.toString(), ex));
+            return unkHost;
+        }
+    }
 }
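
The doAs support above has three pieces: the requesting principal is reduced to its
Hadoop short name, the optional doAs query parameter names the effective user, and
ProxyUserSupport.validate() checks the (requester, host, doAs) triple.  A small sketch
of the short-name mapping, using the same UserGroupInformation API as getRequestingUser();
the principal, host names, and sample URL below are made up for illustration, and the
exact shortening depends on the cluster's hadoop.security.auth_to_local rules:

    import org.apache.hadoop.security.UserGroupInformation;

    /** Sketch of reducing a Kerberos principal to the short name used by proxyuser checks. */
    class ShortNameSketch {
        public static void main(String[] args) {
            // "hue/gateway.example.com@EXAMPLE.COM" is typically reduced to "hue",
            // which is the form the webhcat.proxyuser.* properties are written against.
            String shortName = UserGroupInformation
                .createRemoteUser("hue/gateway.example.com@EXAMPLE.COM")
                .getShortUserName();
            System.out.println(shortName);

            // A proxied request would then look roughly like (hypothetical host and job id):
            //   GET http://webhcat-host:50111/templeton/v1/queue/job_201308301234_0001?user.name=hue&doAs=joe
        }
    }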

Modified: hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/StatusDelegator.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/StatusDelegator.java?rev=1519056&r1=1519055&r2=1519056&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/StatusDelegator.java (original)
+++ hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/StatusDelegator.java Fri Aug 30 19:46:15 2013
@@ -22,10 +22,11 @@ import java.io.IOException;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.shims.HadoopShims.WebHCatJTShim;
+import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.mapred.JobID;
 import org.apache.hadoop.mapred.JobProfile;
 import org.apache.hadoop.mapred.JobStatus;
-import org.apache.hadoop.mapred.TempletonJobTracker;
 import org.apache.hcatalog.templeton.tool.JobState;
 
 /**
@@ -41,10 +42,10 @@ public class StatusDelegator extends Tem
     public QueueStatusBean run(String user, String id)
         throws NotAuthorizedException, BadParam, IOException, InterruptedException
     {
-        TempletonJobTracker tracker = null;
+        WebHCatJTShim tracker = null;
         JobState state = null;
         try {
-            tracker = new TempletonJobTracker(appConf);
+            tracker = ShimLoader.getHadoopShims().getWebHCatShim(appConf);
             JobID jobid = StatusDelegator.StringToJobID(id);
             if (jobid == null)
                 throw new BadParam("Invalid jobid: " + id);
@@ -60,7 +61,7 @@ public class StatusDelegator extends Tem
         }
     }
 
-    public static QueueStatusBean makeStatus(TempletonJobTracker tracker,
+    public static QueueStatusBean makeStatus(WebHCatJTShim tracker,
                                              JobID jobid,
                                              String childid,
                                              JobState state)
@@ -87,7 +88,7 @@ public class StatusDelegator extends Tem
         return new QueueStatusBean(state, status, profile);
     }
 
-    public static QueueStatusBean makeStatus(TempletonJobTracker tracker,
+    public static QueueStatusBean makeStatus(WebHCatJTShim tracker,
                                              JobID jobid,
                                              JobState state)
         throws BadParam, IOException {

Modified: hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/UgiFactory.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/UgiFactory.java?rev=1519056&r1=1519055&r2=1519056&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/UgiFactory.java (original)
+++ hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/UgiFactory.java Fri Aug 30 19:46:15 2013
@@ -27,7 +27,7 @@ public class UgiFactory {
     private static ConcurrentHashMap<String, UserGroupInformation> userUgiMap =
         new ConcurrentHashMap<String, UserGroupInformation>();
 
-    static UserGroupInformation getUgi(String user) throws IOException {
+    public static UserGroupInformation getUgi(String user) throws IOException {
         UserGroupInformation ugi = userUgiMap.get(user);
         if (ugi == null) {
             //create new ugi and add to map

Modified: hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/tool/HDFSStorage.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/tool/HDFSStorage.java?rev=1519056&r1=1519055&r2=1519056&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/tool/HDFSStorage.java (original)
+++ hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/tool/HDFSStorage.java Fri Aug 30 19:46:15 2013
@@ -19,6 +19,7 @@
 package org.apache.hcatalog.templeton.tool;
 
 import java.io.BufferedReader;
+import java.io.Closeable;
 import java.io.IOException;
 import java.io.InputStreamReader;
 import java.io.OutputStreamWriter;
@@ -68,31 +69,29 @@ public class HDFSStorage implements Temp
             return;
         }
         PrintWriter out = null;
+        // TODO: FileSystem#setPermission() - should this make sure to set 777 on jobs/ ?
+        Path keyfile = new Path(getPath(type) + "/" + id + "/" + key);
         try {
-            Path keyfile = new Path(getPath(type) + "/" + id + "/" + key);
             // This will replace the old value if there is one
             // Overwrite the existing file
             out = new PrintWriter(new OutputStreamWriter(fs.create(keyfile)));
             out.write(val);
-        } catch (IOException e) {
-            LOG.info("Couldn't write to " + getPath(type) + "/" + id + ": "
-                + e.getMessage());
+            out.flush();
+        } catch (Exception e) {
+            String errMsg = "Couldn't write to " + keyfile + ": " + e.getMessage();
+            LOG.error(errMsg, e);
+            throw new NotFoundException(errMsg, e);
         } finally {
-            try {
-                out.flush();
-                out.close();
-            } catch (Exception e) {
-                // fail
-            }
+            close(out);
         }
     }
 
     @Override
     public String getField(Type type, String id, String key) {
         BufferedReader in = null;
+        Path p = new Path(getPath(type) + "/" + id + "/" + key);
         try {
-            in = new BufferedReader(new InputStreamReader(fs.open(new Path(getPath(type) + "/" +
-                id + "/" + key))));
+            in = new BufferedReader(new InputStreamReader(fs.open(p)));
             String line = null;
             String val = "";
             while ((line = in.readLine()) != null) {
@@ -102,15 +101,10 @@ public class HDFSStorage implements Temp
                 val += line;
             }
             return val;
-        } catch (IOException e) {
-            LOG.trace("Couldn't find " + getPath(type) + "/" + id + "/" + key
-                + ": " + e.getMessage());
+        } catch (Exception e) {
+            LOG.info("Couldn't find " + p + ": " + e.getMessage(), e);
         } finally {
-            try {
-                in.close();
-            } catch (Exception e) {
-                // fail
-            }
+            close(in);
         }
         return null;
     }
@@ -119,8 +113,9 @@ public class HDFSStorage implements Temp
     public Map<String, String> getFields(Type type, String id) {
         HashMap<String, String> map = new HashMap<String, String>();
         BufferedReader in = null;
+        Path p = new Path(getPath(type) + "/" + id);
         try {
-            for (FileStatus status : fs.listStatus(new Path(getPath(type) + "/" + id))) {
+            for (FileStatus status : fs.listStatus(p)) {
                 in = new BufferedReader(new InputStreamReader(fs.open(status.getPath())));
                 String line = null;
                 String val = "";
@@ -133,23 +128,20 @@ public class HDFSStorage implements Temp
                 map.put(status.getPath().getName(), val);
             }
         } catch (IOException e) {
-            LOG.trace("Couldn't find " + getPath(type) + "/" + id);
+            LOG.trace("Couldn't find " + p);
         } finally {
-            try {
-                in.close();
-            } catch (Exception e) {
-                // fail
-            }
+            close(in);
         }
         return map;
     }
 
     @Override
     public boolean delete(Type type, String id) throws NotFoundException {
+        Path p = new Path(getPath(type) + "/" + id);
         try {
-            fs.delete(new Path(getPath(type) + "/" + id), true);
+            fs.delete(p, true);
         } catch (IOException e) {
-            throw new NotFoundException("Node " + id + " was not found: " +
+            throw new NotFoundException("Node " + p + " was not found: " +
                 e.getMessage());
         }
         return false;
@@ -251,4 +243,15 @@ public class HDFSStorage implements Temp
         }
         return typepath;
     }
+    private void close(Closeable is) {
+        if(is == null) {
+            return;
+        }
+        try {
+            is.close();
+        }
+        catch (IOException ex) {
+            LOG.trace("Failed to close InputStream: " + ex.getMessage());
+        }
+    }
 }

Modified: hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/tool/NotFoundException.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/tool/NotFoundException.java?rev=1519056&r1=1519055&r2=1519056&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/tool/NotFoundException.java (original)
+++ hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/tool/NotFoundException.java Fri Aug 30 19:46:15 2013
@@ -27,4 +27,7 @@ public class NotFoundException extends E
     public NotFoundException(String msg) {
         super(msg);
     }
+    public NotFoundException(String msg, Throwable rootCause) {
+        super(msg, rootCause);
+    }
 }

Modified: hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/tool/TempletonUtils.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/tool/TempletonUtils.java?rev=1519056&r1=1519055&r2=1519056&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/tool/TempletonUtils.java (original)
+++ hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/tool/TempletonUtils.java Fri Aug 30 19:46:15 2013
@@ -38,6 +38,7 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.util.StringUtils;
+import org.apache.hcatalog.templeton.UgiFactory;
 
 /**
  * General utility methods.
@@ -210,23 +211,19 @@ public class TempletonUtils {
         }
     }
 
-    public static Path hadoopFsPath(String fname, Configuration conf, String user)
-        throws URISyntaxException, FileNotFoundException, IOException,
+    public static Path hadoopFsPath(final String fname, final Configuration conf, String user)
+        throws URISyntaxException, IOException,
         InterruptedException {
         if (fname == null || conf == null) {
             return null;
         }
 
-        final Configuration fConf = new Configuration(conf);
-        final String finalFName = new String(fname);
-
-        UserGroupInformation ugi = UserGroupInformation.getLoginUser();
+        UserGroupInformation ugi = UgiFactory.getUgi(user);
         final FileSystem defaultFs = 
                 ugi.doAs(new PrivilegedExceptionAction<FileSystem>() {
                     public FileSystem run() 
-                        throws URISyntaxException, FileNotFoundException, IOException,
-                            InterruptedException {
-                        return FileSystem.get(new URI(finalFName), fConf);
+                        throws URISyntaxException, IOException, InterruptedException {
+                        return FileSystem.get(new URI(fname), conf);
                     }
                 });