You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hive.apache.org by gu...@apache.org on 2013/10/23 05:59:30 UTC

svn commit: r1534903 [1/5] - in /hive/branches/tez: ./ common/src/java/org/apache/hadoop/hive/conf/ conf/ data/files/ hbase-handler/src/java/org/apache/hadoop/hive/hbase/ hcatalog/bin/ hcatalog/src/test/e2e/templeton/inpdir/ hcatalog/src/test/e2e/templ...

Author: gunther
Date: Wed Oct 23 03:59:27 2013
New Revision: 1534903

URL: http://svn.apache.org/r1534903
Log:
Merge latest trunk into branch. (Gunther Hagleitner)

Added:
    hive/branches/tez/data/files/input.txt
      - copied unchanged from r1534898, hive/trunk/data/files/input.txt
    hive/branches/tez/hcatalog/src/test/e2e/templeton/inpdir/hcatloadstore.pig
      - copied unchanged from r1534898, hive/trunk/hcatalog/src/test/e2e/templeton/inpdir/hcatloadstore.pig
    hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/DelegationTokenCache.java
      - copied unchanged from r1534898, hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/DelegationTokenCache.java
    hive/branches/tez/ql/src/test/queries/clientnegative/compute_stats_long.q
      - copied unchanged from r1534898, hive/trunk/ql/src/test/queries/clientnegative/compute_stats_long.q
    hive/branches/tez/ql/src/test/queries/clientpositive/split.q
      - copied unchanged from r1534898, hive/trunk/ql/src/test/queries/clientpositive/split.q
    hive/branches/tez/ql/src/test/queries/clientpositive/stats_list_bucket.q
      - copied unchanged from r1534898, hive/trunk/ql/src/test/queries/clientpositive/stats_list_bucket.q
    hive/branches/tez/ql/src/test/results/clientnegative/compute_stats_long.q.out
      - copied unchanged from r1534898, hive/trunk/ql/src/test/results/clientnegative/compute_stats_long.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/split.q.out
      - copied unchanged from r1534898, hive/trunk/ql/src/test/results/clientpositive/split.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/stats_list_bucket.q.out
      - copied unchanged from r1534898, hive/trunk/ql/src/test/results/clientpositive/stats_list_bucket.q.out
Modified:
    hive/branches/tez/   (props changed)
    hive/branches/tez/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
    hive/branches/tez/conf/hive-default.xml.template
    hive/branches/tez/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseStorageHandler.java
    hive/branches/tez/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHBaseTableInputFormat.java
    hive/branches/tez/hcatalog/bin/hcat
    hive/branches/tez/hcatalog/src/test/e2e/templeton/tests/jobsubmission.conf
    hive/branches/tez/hcatalog/webhcat/svr/src/main/bin/webhcat_server.sh
    hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/CompleteDelegator.java
    hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/HiveDelegator.java
    hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/JarDelegator.java
    hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/LauncherDelegator.java
    hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/PigDelegator.java
    hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/SecureProxySupport.java
    hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Server.java
    hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/StreamingDelegator.java
    hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TempletonControllerJob.java
    hive/branches/tez/hwi/ivy.xml
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeGenericFuncEvaluator.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/TableScanOperator.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorMapJoinOperator.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/udf/VectorUDFAdaptor.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/index/IndexPredicateAnalyzer.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/index/IndexSearchCondition.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/index/compact/CompactIndexHandler.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcInputFormat.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/io/sarg/SearchArgument.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/io/sarg/SearchArgumentImpl.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveStoragePredicateHandler.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteCanApplyProcFactory.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/optimizer/pcr/PcrExprProcFactory.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartExprEvalUtils.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionExpressionForMetastore.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionPruner.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseException.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeDescUtils.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeGenericFuncDesc.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/TableScanDesc.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerProcFactory.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFComputeStats.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFSplit.java
    hive/branches/tez/ql/src/test/org/apache/hadoop/hive/metastore/TestMetastoreExpr.java
    hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/exec/TestUtilities.java
    hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorSelectOperator.java
    hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizationContext.java
    hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/io/sarg/TestSearchArgumentImpl.java
    hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/optimizer/physical/TestVectorizer.java
    hive/branches/tez/ql/src/test/results/clientnegative/alter_partition_coltype_2columns.q.out
    hive/branches/tez/ql/src/test/results/clientnegative/alter_partition_coltype_invalidtype.q.out
    hive/branches/tez/ql/src/test/results/clientnegative/archive_partspec3.q.out
    hive/branches/tez/ql/src/test/results/clientnegative/clusterbyorderby.q.out
    hive/branches/tez/ql/src/test/results/clientnegative/column_rename3.q.out
    hive/branches/tez/ql/src/test/results/clientnegative/columnstats_partlvl_multiple_part_clause.q.out
    hive/branches/tez/ql/src/test/results/clientnegative/create_or_replace_view6.q.out
    hive/branches/tez/ql/src/test/results/clientnegative/invalid_create_tbl2.q.out
    hive/branches/tez/ql/src/test/results/clientnegative/invalid_select_expression.q.out
    hive/branches/tez/ql/src/test/results/clientnegative/invalid_tbl_name.q.out
    hive/branches/tez/ql/src/test/results/clientnegative/lateral_view_join.q.out
    hive/branches/tez/ql/src/test/results/clientnegative/ptf_negative_DistributeByOrderBy.q.out
    hive/branches/tez/ql/src/test/results/clientnegative/ptf_negative_PartitionBySortBy.q.out
    hive/branches/tez/ql/src/test/results/clientnegative/ptf_window_boundaries.q.out
    hive/branches/tez/ql/src/test/results/clientnegative/ptf_window_boundaries2.q.out
    hive/branches/tez/ql/src/test/results/clientnegative/select_charliteral.q.out
    hive/branches/tez/ql/src/test/results/clientnegative/select_udtf_alias.q.out
    hive/branches/tez/ql/src/test/results/clientnegative/set_table_property.q.out
    hive/branches/tez/ql/src/test/results/clientnegative/show_columns2.q.out
    hive/branches/tez/ql/src/test/results/clientnegative/show_tables_bad1.q.out
    hive/branches/tez/ql/src/test/results/clientnegative/show_tables_bad2.q.out
    hive/branches/tez/ql/src/test/results/clientnegative/uniquejoin3.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/udf_split.q.out
    hive/branches/tez/ql/src/test/results/compiler/errors/garbage.q.out
    hive/branches/tez/ql/src/test/results/compiler/errors/invalid_create_table.q.out
    hive/branches/tez/ql/src/test/results/compiler/errors/invalid_select.q.out
    hive/branches/tez/ql/src/test/results/compiler/errors/macro_reserved_word.q.out
    hive/branches/tez/ql/src/test/results/compiler/errors/missing_overwrite.q.out
    hive/branches/tez/ql/src/test/results/compiler/errors/quoted_string.q.out
    hive/branches/tez/ql/src/test/results/compiler/errors/wrong_distinct2.q.out
    hive/branches/tez/ql/src/test/results/compiler/plan/case_sensitivity.q.xml
    hive/branches/tez/ql/src/test/results/compiler/plan/cast1.q.xml
    hive/branches/tez/ql/src/test/results/compiler/plan/groupby1.q.xml
    hive/branches/tez/ql/src/test/results/compiler/plan/groupby2.q.xml
    hive/branches/tez/ql/src/test/results/compiler/plan/groupby3.q.xml
    hive/branches/tez/ql/src/test/results/compiler/plan/groupby4.q.xml
    hive/branches/tez/ql/src/test/results/compiler/plan/groupby5.q.xml
    hive/branches/tez/ql/src/test/results/compiler/plan/groupby6.q.xml
    hive/branches/tez/ql/src/test/results/compiler/plan/input1.q.xml
    hive/branches/tez/ql/src/test/results/compiler/plan/input2.q.xml
    hive/branches/tez/ql/src/test/results/compiler/plan/input20.q.xml
    hive/branches/tez/ql/src/test/results/compiler/plan/input3.q.xml
    hive/branches/tez/ql/src/test/results/compiler/plan/input4.q.xml
    hive/branches/tez/ql/src/test/results/compiler/plan/input5.q.xml
    hive/branches/tez/ql/src/test/results/compiler/plan/input6.q.xml
    hive/branches/tez/ql/src/test/results/compiler/plan/input7.q.xml
    hive/branches/tez/ql/src/test/results/compiler/plan/input8.q.xml
    hive/branches/tez/ql/src/test/results/compiler/plan/input9.q.xml
    hive/branches/tez/ql/src/test/results/compiler/plan/input_part1.q.xml
    hive/branches/tez/ql/src/test/results/compiler/plan/input_testsequencefile.q.xml
    hive/branches/tez/ql/src/test/results/compiler/plan/input_testxpath.q.xml
    hive/branches/tez/ql/src/test/results/compiler/plan/input_testxpath2.q.xml
    hive/branches/tez/ql/src/test/results/compiler/plan/join1.q.xml
    hive/branches/tez/ql/src/test/results/compiler/plan/join2.q.xml
    hive/branches/tez/ql/src/test/results/compiler/plan/join3.q.xml
    hive/branches/tez/ql/src/test/results/compiler/plan/join4.q.xml
    hive/branches/tez/ql/src/test/results/compiler/plan/join5.q.xml
    hive/branches/tez/ql/src/test/results/compiler/plan/join6.q.xml
    hive/branches/tez/ql/src/test/results/compiler/plan/join7.q.xml
    hive/branches/tez/ql/src/test/results/compiler/plan/join8.q.xml
    hive/branches/tez/ql/src/test/results/compiler/plan/sample1.q.xml
    hive/branches/tez/ql/src/test/results/compiler/plan/sample2.q.xml
    hive/branches/tez/ql/src/test/results/compiler/plan/sample3.q.xml
    hive/branches/tez/ql/src/test/results/compiler/plan/sample4.q.xml
    hive/branches/tez/ql/src/test/results/compiler/plan/sample5.q.xml
    hive/branches/tez/ql/src/test/results/compiler/plan/sample6.q.xml
    hive/branches/tez/ql/src/test/results/compiler/plan/sample7.q.xml
    hive/branches/tez/ql/src/test/results/compiler/plan/subq.q.xml
    hive/branches/tez/ql/src/test/results/compiler/plan/udf1.q.xml
    hive/branches/tez/ql/src/test/results/compiler/plan/udf4.q.xml
    hive/branches/tez/ql/src/test/results/compiler/plan/udf6.q.xml
    hive/branches/tez/ql/src/test/results/compiler/plan/udf_case.q.xml
    hive/branches/tez/ql/src/test/results/compiler/plan/udf_when.q.xml
    hive/branches/tez/ql/src/test/results/compiler/plan/union.q.xml
    hive/branches/tez/service/src/java/org/apache/hive/service/cli/operation/HiveCommandOperation.java
    hive/branches/tez/shims/src/0.20/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java
    hive/branches/tez/shims/src/0.20S/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java
    hive/branches/tez/shims/src/0.23/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java
    hive/branches/tez/shims/src/common-secure/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java
    hive/branches/tez/shims/src/common/java/org/apache/hadoop/fs/ProxyFileSystem.java
    hive/branches/tez/shims/src/common/java/org/apache/hadoop/fs/ProxyLocalFileSystem.java
    hive/branches/tez/shims/src/common/java/org/apache/hadoop/hive/shims/HadoopShims.java
    hive/branches/tez/shims/src/common/java/org/apache/hadoop/hive/thrift/TUGIContainingTransport.java
    hive/branches/tez/testutils/ptest2/src/main/resources/batch-exec.vm
    hive/branches/tez/testutils/ptest2/src/main/resources/smart-apply-patch.sh
    hive/branches/tez/testutils/ptest2/src/main/resources/source-prep.vm

Propchange: hive/branches/tez/
------------------------------------------------------------------------------
  Merged /hive/trunk:r1533638-1534898

Modified: hive/branches/tez/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java?rev=1534903&r1=1534902&r2=1534903&view=diff
==============================================================================
--- hive/branches/tez/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java (original)
+++ hive/branches/tez/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java Wed Oct 23 03:59:27 2013
@@ -635,7 +635,7 @@ public class HiveConf extends Configurat
     // standard error allowed for ndv estimates. A lower value indicates higher accuracy and a
     // higher compute cost.
     HIVE_STATS_NDV_ERROR("hive.stats.ndv.error", (float)20.0),
-    HIVE_STATS_KEY_PREFIX_MAX_LENGTH("hive.stats.key.prefix.max.length", 200),
+    HIVE_STATS_KEY_PREFIX_MAX_LENGTH("hive.stats.key.prefix.max.length", 150),
     HIVE_STATS_KEY_PREFIX("hive.stats.key.prefix", ""), // internal usage only
 
     // Concurrency
@@ -978,7 +978,7 @@ public class HiveConf extends Configurat
 
   public void verifyAndSet(String name, String value) throws IllegalArgumentException {
     if (restrictList.contains(name)) {
-      throw new IllegalArgumentException("Cann't modify " + name + " at runtime");
+      throw new IllegalArgumentException("Cannot modify " + name + " at runtime");
     }
     set(name, value);
   }
@@ -1122,6 +1122,7 @@ public class HiveConf extends Configurat
     hiveJar = other.hiveJar;
     auxJars = other.auxJars;
     origProp = (Properties)other.origProp.clone();
+    restrictList.addAll(other.restrictList);
   }
 
   public Properties getAllProperties() {

Modified: hive/branches/tez/conf/hive-default.xml.template
URL: http://svn.apache.org/viewvc/hive/branches/tez/conf/hive-default.xml.template?rev=1534903&r1=1534902&r2=1534903&view=diff
==============================================================================
--- hive/branches/tez/conf/hive-default.xml.template (original)
+++ hive/branches/tez/conf/hive-default.xml.template Wed Oct 23 03:59:27 2013
@@ -2012,7 +2012,7 @@
 
 <property>
   <name>hive.server2.thrift.sasl.qop</name>
-  <value>auth</auth>
+  <value>auth</value>
   <description>Sasl QOP value; Set it to one of following values to enable higher levels of
      protection for hive server2 communication with clients.
       "auth" - authentication only (default)

Modified: hive/branches/tez/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseStorageHandler.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseStorageHandler.java?rev=1534903&r1=1534902&r2=1534903&view=diff
==============================================================================
--- hive/branches/tez/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseStorageHandler.java (original)
+++ hive/branches/tez/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseStorageHandler.java Wed Oct 23 03:59:27 2013
@@ -50,6 +50,7 @@ import org.apache.hadoop.hive.ql.index.I
 import org.apache.hadoop.hive.ql.metadata.DefaultStorageHandler;
 import org.apache.hadoop.hive.ql.metadata.HiveStoragePredicateHandler;
 import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
 import org.apache.hadoop.hive.ql.plan.TableDesc;
 import org.apache.hadoop.hive.serde2.Deserializer;
 import org.apache.hadoop.hive.serde2.SerDe;
@@ -410,8 +411,8 @@ public class HBaseStorageHandler extends
         hbaseSerde.getStorageFormatOfCol(keyColPos).get(0));
     List<IndexSearchCondition> searchConditions =
       new ArrayList<IndexSearchCondition>();
-    ExprNodeDesc residualPredicate =
-      analyzer.analyzePredicate(predicate, searchConditions);
+    ExprNodeGenericFuncDesc residualPredicate =
+      (ExprNodeGenericFuncDesc)analyzer.analyzePredicate(predicate, searchConditions);
     int scSize = searchConditions.size();
     if (scSize < 1 || 2 < scSize) {
       // Either there was nothing which could be pushed down (size = 0),

Modified: hive/branches/tez/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHBaseTableInputFormat.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHBaseTableInputFormat.java?rev=1534903&r1=1534902&r2=1534903&view=diff
==============================================================================
--- hive/branches/tez/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHBaseTableInputFormat.java (original)
+++ hive/branches/tez/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHBaseTableInputFormat.java Wed Oct 23 03:59:27 2013
@@ -43,6 +43,7 @@ import org.apache.hadoop.hive.ql.index.I
 import org.apache.hadoop.hive.ql.index.IndexSearchCondition;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
 import org.apache.hadoop.hive.ql.plan.TableScanDesc;
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.ByteStream;
@@ -253,8 +254,8 @@ public class HiveHBaseTableInputFormat e
     if (filterExprSerialized == null) {
       return scan;
     }
-    ExprNodeDesc filterExpr =
-      Utilities.deserializeExpression(filterExprSerialized, jobConf);
+    ExprNodeGenericFuncDesc filterExpr =
+      Utilities.deserializeExpression(filterExprSerialized);
 
     String colName = jobConf.get(serdeConstants.LIST_COLUMNS).split(",")[iKey];
     String colType = jobConf.get(serdeConstants.LIST_COLUMN_TYPES).split(",")[iKey];

Modified: hive/branches/tez/hcatalog/bin/hcat
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/bin/hcat?rev=1534903&r1=1534902&r2=1534903&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/bin/hcat (original)
+++ hive/branches/tez/hcatalog/bin/hcat Wed Oct 23 03:59:27 2013
@@ -129,6 +129,19 @@ for jar in ${HIVE_LIB_DIR}/*.jar ; do
 	HADOOP_CLASSPATH=$HADOOP_CLASSPATH:$jar
 done
 
+# add the auxillary jars such as serdes
+if [ -d "${HIVE_AUX_JARS_PATH}" ]; then
+  for f in ${HIVE_AUX_JARS_PATH}/*.jar; do
+    if [[ ! -f $f ]]; then
+        continue;
+    fi
+    HADOOP_CLASSPATH=${HADOOP_CLASSPATH}:$f
+  done
+elif [ "${HIVE_AUX_JARS_PATH}" != "" ]; then 
+  HIVE_AUX_JARS_PATH=`echo $HIVE_AUX_JARS_PATH | sed 's/,/:/g'`
+  HADOOP_CLASSPATH=$HADOOP_CLASSPATH:$HIVE_AUX_JARS_PATH
+fi
+
 # Put external jars, hcat jar, and config file in the classpath
 HADOOP_CLASSPATH=${HADOOP_CLASSPATH}:${HCAT_CLASSPATH}:${HCAT_JAR}:${HIVE_CONF_DIR}
 

Modified: hive/branches/tez/hcatalog/src/test/e2e/templeton/tests/jobsubmission.conf
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/src/test/e2e/templeton/tests/jobsubmission.conf?rev=1534903&r1=1534902&r2=1534903&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/src/test/e2e/templeton/tests/jobsubmission.conf (original)
+++ hive/branches/tez/hcatalog/src/test/e2e/templeton/tests/jobsubmission.conf Wed Oct 23 03:59:27 2013
@@ -234,7 +234,7 @@ $cfg = 
 
     {
                                 #a simple load store script with log enabled
-     'num' => 9,
+     'num' => 10,
      'method' => 'POST',
      'url' => ':TEMPLETON_URL:/templeton/v1/pig',
      'post_options' => ['user.name=:UNAME:', 'arg=-p', 'arg=INPDIR=:INPDIR_HDFS:','arg=-p', 'arg=OUTDIR=:OUTDIR:', 'file=:INPDIR_HDFS:/loadstore.pig',
@@ -249,7 +249,31 @@ $cfg = 
      'check_call_back' => 1,
     },
 
-    #test 10
+    {
+    #note: this test will fail unless Hive is installed in the default location Pig expects it in
+    #HIVE-5547 will address this limitation
+     'num' => 11,
+     'setup' => [
+                 {
+                  'method' => 'POST',
+                  'url' => ':TEMPLETON_URL:/templeton/v1/ddl',
+                  'status_code' => 200,
+                  'post_options' => ['user.name=:UNAME:','exec=drop table if exists hcattest_pig; create table hcattest_pig(i int, j int) STORED AS textfile;'],
+                  'json_field_substr_match' => {'stderr' => 'OK'}
+                 }
+                ],
+     'method' => 'POST',
+     'url' => ':TEMPLETON_URL:/templeton/v1/pig',
+     'post_options' => ['user.name=:UNAME:', 'arg=-useHCatalog', 'arg=-p', 'arg=INPDIR=:INPDIR_HDFS:', 'arg=-p', 'arg= OUTDIR=:OUTDIR:', 'file=:INPDIR_HDFS:/hcatloadstore.pig'],
+     
+     'json_field_substr_match' => { 'id' => '\d+'},
+     'status_code' => 200,
+     'check_job_created' => 1,
+     'check_job_complete' => 'SUCCESS',
+     'check_job_exit_value' => 0,
+     'check_call_back' => 1,                                                                                                                                                     
+    },
+    #test 11
     #TODO jython test
 
 
@@ -420,7 +444,7 @@ $cfg = 
 
     {
                                 #test add jar
-     'num' => 9,
+     'num' => 11,
      'ignore23' => 'Log collector does not work with Hadoop 2',
      'method' => 'POST',
      'url' => ':TEMPLETON_URL:/templeton/v1/hive',
@@ -435,7 +459,7 @@ $cfg = 
     },
     {
                                 #test add jar when the jar is not shipped
-     'num' => 10,
+     'num' => 12,
      'method' => 'POST',
      'url' => ':TEMPLETON_URL:/templeton/v1/hive',
      'post_options' => ['user.name=:UNAME:','execute=add jar piggybank.jar',],
@@ -449,7 +473,7 @@ $cfg = 
     }, 
     {
                                 #enable logs
-     'num' => 11,
+     'num' => 13,
      'ignore23' => 'Log collector does not work with Hadoop 2',
      'method' => 'POST',
      'url' => ':TEMPLETON_URL:/templeton/v1/hive',	

Modified: hive/branches/tez/hcatalog/webhcat/svr/src/main/bin/webhcat_server.sh
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/webhcat/svr/src/main/bin/webhcat_server.sh?rev=1534903&r1=1534902&r2=1534903&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/webhcat/svr/src/main/bin/webhcat_server.sh (original)
+++ hive/branches/tez/hcatalog/webhcat/svr/src/main/bin/webhcat_server.sh Wed Oct 23 03:59:27 2013
@@ -121,7 +121,7 @@ function check_pid() {
 
 # Start the webhcat server in the foreground
 function foreground_webhcat() {
-        $start_cmd
+        exec $start_cmd
 }
 
 # Start the webhcat server in the background.  Record the PID for

Modified: hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/CompleteDelegator.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/CompleteDelegator.java?rev=1534903&r1=1534902&r2=1534903&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/CompleteDelegator.java (original)
+++ hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/CompleteDelegator.java Wed Oct 23 03:59:27 2013
@@ -20,11 +20,15 @@ package org.apache.hive.hcatalog.templet
 
 import java.io.IOException;
 import java.net.URL;
-import java.net.MalformedURLException;
 import java.util.Date;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.common.classification.InterfaceAudience;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
+import org.apache.hive.hcatalog.common.HCatUtil;
+import org.apache.hive.hcatalog.templeton.tool.DelegationTokenCache;
 import org.apache.hive.hcatalog.templeton.tool.JobState;
 import org.apache.hive.hcatalog.templeton.tool.TempletonUtils;
 
@@ -39,11 +43,12 @@ import org.apache.hive.hcatalog.templeto
  * this at the same time.  That should never happen.
  *
  * We use a Hadoop config var to notify this class on the completion
- * of a job.  Hadoop will call use multiple times in the event of
+ * of a job.  Hadoop will call us multiple times in the event of
  * failure.  Even if the failure is that the client callback failed.
  *
  * See LauncherDelegator for the HADOOP_END_RETRY* vars that are set.
  */
+@InterfaceAudience.Private
 public class CompleteDelegator extends TempletonDelegator {
   private static final Log LOG = LogFactory.getLog(CompleteDelegator.class);
 
@@ -51,28 +56,36 @@ public class CompleteDelegator extends T
     super(appConf);
   }
 
-  public CompleteBean run(String id)
+  public CompleteBean run(String id, String jobStatus)
     throws CallbackFailedException, IOException {
     if (id == null)
       acceptWithError("No jobid given");
 
     JobState state = null;
+    /* we don't want to cancel the delegation token if we think the callback is going to
+     to be retried, for example, because the job is not complete yet */
+    boolean cancelMetastoreToken = false;
     try {
       state = new JobState(id, Main.getAppConfigInstance());
       if (state.getCompleteStatus() == null)
-        failed("Job not yet complete. jobId=" + id, null);
+        failed("Job not yet complete. jobId=" + id + " Status from JT=" + jobStatus, null);
 
       Long notified = state.getNotifiedTime();
-      if (notified != null)
+      if (notified != null) {
+        cancelMetastoreToken = true;
         return acceptWithError("Callback already run for jobId=" + id +
                 " at " + new Date(notified));
+      }
 
       String callback = state.getCallback();
-      if (callback == null)
+      if (callback == null) {
+        cancelMetastoreToken = true;
         return new CompleteBean("No callback registered");
-
+      }
+      
       try {
         doCallback(state.getId(), callback);
+        cancelMetastoreToken = true;
       } catch (Exception e) {
         failed("Callback failed " + callback + " for " + id, e);
       }
@@ -80,8 +93,26 @@ public class CompleteDelegator extends T
       state.setNotifiedTime(System.currentTimeMillis());
       return new CompleteBean("Callback sent");
     } finally {
-      if (state != null)
-        state.close();
+      state.close();
+      HiveMetaStoreClient client = null;
+      try {
+        if(cancelMetastoreToken) {
+          String metastoreTokenStrForm =
+                  DelegationTokenCache.getStringFormTokenCache().getDelegationToken(id);
+          if(metastoreTokenStrForm != null) {
+            client = HCatUtil.getHiveClient(new HiveConf());
+            client.cancelDelegationToken(metastoreTokenStrForm);
+            LOG.debug("Cancelled token for jobId=" + id + " status from JT=" + jobStatus);
+            DelegationTokenCache.getStringFormTokenCache().removeDelegationToken(id);
+          }
+        }
+      }
+      catch(Exception ex) {
+        LOG.warn("Failed to cancel metastore delegation token for jobId=" + id, ex);
+      }
+      finally {
+        HCatUtil.closeHiveClientQuietly(client);
+      }
     }
   }
 
@@ -90,8 +121,7 @@ public class CompleteDelegator extends T
    * finished.  If the url has the string $jobId in it, it will be
    * replaced with the completed jobid.
    */
-  public static void doCallback(String jobid, String url)
-    throws MalformedURLException, IOException {
+  public static void doCallback(String jobid, String url) throws IOException {
     if (url.contains("$jobId"))
       url = url.replace("$jobId", jobid);
     TempletonUtils.fetchUrl(new URL(url));

Modified: hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/HiveDelegator.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/HiveDelegator.java?rev=1534903&r1=1534902&r2=1534903&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/HiveDelegator.java (original)
+++ hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/HiveDelegator.java Wed Oct 23 03:59:27 2013
@@ -65,6 +65,8 @@ public class HiveDelegator extends Launc
       args.addAll(makeBasicArgs(execute, srcFile, otherFiles, statusdir, completedUrl, enablelog));
       args.add("--");
       TempletonUtils.addCmdForWindows(args);
+      addHiveMetaStoreTokenArg();
+      
       args.add(appConf.hivePath());
 
       args.add("--service");
@@ -111,9 +113,10 @@ public class HiveDelegator extends Launc
     ArrayList<String> args = new ArrayList<String>();
 
     ArrayList<String> allFiles = new ArrayList<String>();
-    if (TempletonUtils.isset(srcFile))
+    if (TempletonUtils.isset(srcFile)) {
       allFiles.add(TempletonUtils.hadoopFsFilename(srcFile, appConf,
           runAs));
+    }
 
     if (TempletonUtils.isset(otherFiles)) {
       String[] ofs = TempletonUtils.hadoopFsListAsArray(otherFiles, appConf, runAs);

Modified: hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/JarDelegator.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/JarDelegator.java?rev=1534903&r1=1534902&r2=1534903&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/JarDelegator.java (original)
+++ hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/JarDelegator.java Wed Oct 23 03:59:27 2013
@@ -42,14 +42,15 @@ public class JarDelegator extends Launch
   public EnqueueBean run(String user, Map<String, Object> userArgs, String jar, String mainClass,
                String libjars, String files,
                List<String> jarArgs, List<String> defines,
-               String statusdir, String callback, String completedUrl,
+               String statusdir, String callback, 
+               boolean usehcatalog, String completedUrl,
                boolean enablelog, JobType jobType)
     throws NotAuthorizedException, BadParam, BusyException, QueueException,
     ExecuteException, IOException, InterruptedException {
     runAs = user;
     List<String> args = makeArgs(jar, mainClass,
       libjars, files, jarArgs, defines,
-      statusdir, completedUrl, enablelog, jobType);
+      statusdir, usehcatalog, completedUrl, enablelog, jobType);
 
     return enqueueController(user, userArgs, callback, args);
   }
@@ -57,23 +58,30 @@ public class JarDelegator extends Launch
   private List<String> makeArgs(String jar, String mainClass,
                   String libjars, String files,
                   List<String> jarArgs, List<String> defines,
-                  String statusdir, String completedUrl,
+                  String statusdir, boolean usehcatalog, String completedUrl,
                   boolean enablelog, JobType jobType)
     throws BadParam, IOException, InterruptedException {
     ArrayList<String> args = new ArrayList<String>();
     try {
-      ArrayList<String> allFiles = new ArrayList();
+      ArrayList<String> allFiles = new ArrayList<String>();
       allFiles.add(TempletonUtils.hadoopFsFilename(jar, appConf, runAs));
 
       args.addAll(makeLauncherArgs(appConf, statusdir,
         completedUrl, allFiles, enablelog, jobType));
       args.add("--");
       TempletonUtils.addCmdForWindows(args);
+
+      //check if the rest command specified explicitly to use hcatalog
+      if(usehcatalog){
+        addHiveMetaStoreTokenArg();
+      }
+
       args.add(appConf.clusterHadoop());
       args.add("jar");
       args.add(TempletonUtils.hadoopFsPath(jar, appConf, runAs).getName());
-      if (TempletonUtils.isset(mainClass))
+      if (TempletonUtils.isset(mainClass)) {
         args.add(mainClass);
+      }
       if (TempletonUtils.isset(libjars)) {
         String libjarsListAsString =
             TempletonUtils.hadoopFsListAsString(libjars, appConf, runAs);

Modified: hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/LauncherDelegator.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/LauncherDelegator.java?rev=1534903&r1=1534902&r2=1534903&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/LauncherDelegator.java (original)
+++ hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/LauncherDelegator.java Wed Oct 23 03:59:27 2013
@@ -24,10 +24,10 @@ import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
 
-import org.apache.commons.exec.ExecuteException;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.ToolRunner;
@@ -44,7 +44,8 @@ import org.apache.hive.hcatalog.templeto
 public class LauncherDelegator extends TempletonDelegator {
   private static final Log LOG = LogFactory.getLog(LauncherDelegator.class);
   protected String runAs = null;
-  static public enum JobType {JAR, STREAMING, PIG, HIVE};
+  static public enum JobType {JAR, STREAMING, PIG, HIVE}
+  private boolean secureMeatastoreAccess = false;
 
   public LauncherDelegator(AppConfig appConf) {
     super(appConf);
@@ -70,7 +71,7 @@ public class LauncherDelegator extends T
    */
   public EnqueueBean enqueueController(String user, Map<String, Object> userArgs, String callback,
                      List<String> args)
-    throws NotAuthorizedException, BusyException, ExecuteException,
+    throws NotAuthorizedException, BusyException,
     IOException, QueueException {
     try {
       UserGroupInformation ugi = UgiFactory.getUgi(user);
@@ -82,9 +83,10 @@ public class LauncherDelegator extends T
       long elapsed = ((System.nanoTime() - startTime) / ((int) 1e6));
       LOG.debug("queued job " + id + " in " + elapsed + " ms");
 
-      if (id == null)
+      if (id == null) {
         throw new QueueException("Unable to get job id");
-
+      }
+      
       registerJob(id, user, callback, userArgs);
 
       return new EnqueueBean(id);
@@ -95,16 +97,14 @@ public class LauncherDelegator extends T
 
   private String queueAsUser(UserGroupInformation ugi, final List<String> args)
     throws IOException, InterruptedException {
-    String id = ugi.doAs(new PrivilegedExceptionAction<String>() {
+    return ugi.doAs(new PrivilegedExceptionAction<String>() {
       public String run() throws Exception {
         String[] array = new String[args.size()];
-        TempletonControllerJob ctrl = new TempletonControllerJob();
+        TempletonControllerJob ctrl = new TempletonControllerJob(secureMeatastoreAccess);
         ToolRunner.run(ctrl, args.toArray(array));
         return ctrl.getSubmittedId();
       }
     });
-
-    return id;
   }
 
   public List<String> makeLauncherArgs(AppConfig appConf, String statusdir,
@@ -182,8 +182,9 @@ public class LauncherDelegator extends T
    */
   public static String makeOverrideClasspath(AppConfig appConf) {
     String[] overrides = appConf.overrideJars();
-    if (overrides == null)
+    if (overrides == null) {
       return null;
+    }
 
     ArrayList<String> cp = new ArrayList<String>();
     for (String fname : overrides) {
@@ -204,5 +205,18 @@ public class LauncherDelegator extends T
       args.add(name + "=" + val);
     }
   }
-
+  /**
+   * This is called by subclasses when they determine that the submitted job requires
+   * metastore access (e.g. Pig job that uses HCatalog).  This then determines if 
+   * secure access is required and causes TempletonControllerJob to set up a delegation token.
+   * @see TempletonControllerJob
+   */
+  void addHiveMetaStoreTokenArg() {
+    //in order for this to work hive-site.xml must be on the classpath
+    HiveConf hiveConf = new HiveConf();
+    if(!hiveConf.getBoolVar(HiveConf.ConfVars.METASTORE_USE_THRIFT_SASL)) {
+      return;
+    }
+    secureMeatastoreAccess = true;
+  }
 }

Modified: hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/PigDelegator.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/PigDelegator.java?rev=1534903&r1=1534902&r2=1534903&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/PigDelegator.java (original)
+++ hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/PigDelegator.java Wed Oct 23 03:59:27 2013
@@ -27,6 +27,8 @@ import java.util.List;
 import java.util.Map;
 
 import org.apache.commons.exec.ExecuteException;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hive.hcatalog.templeton.tool.TempletonControllerJob;
 import org.apache.hive.hcatalog.templeton.tool.TempletonUtils;
 
@@ -36,6 +38,7 @@ import org.apache.hive.hcatalog.templeto
  * This is the backend of the pig web service.
  */
 public class PigDelegator extends LauncherDelegator {
+  private static final Log LOG = LogFactory.getLog(PigDelegator.class);
   public PigDelegator(AppConfig appConf) {
     super(appConf);
   }
@@ -43,27 +46,43 @@ public class PigDelegator extends Launch
   public EnqueueBean run(String user, Map<String, Object> userArgs,
                String execute, String srcFile,
                List<String> pigArgs, String otherFiles,
-               String statusdir, String callback, String completedUrl, boolean enablelog)
+               String statusdir, String callback, 
+               boolean usehcatalog, String completedUrl, boolean enablelog)
     throws NotAuthorizedException, BadParam, BusyException, QueueException,
     ExecuteException, IOException, InterruptedException {
     runAs = user;
     List<String> args = makeArgs(execute,
       srcFile, pigArgs,
-      otherFiles, statusdir, completedUrl, enablelog);
+      otherFiles, statusdir, usehcatalog, completedUrl, enablelog);
 
     return enqueueController(user, userArgs, callback, args);
   }
 
+  /**
+   * @param execute pig query string to be executed
+   * @param srcFile pig query file to be executed
+   * @param pigArgs pig command line arguments
+   * @param otherFiles  files to be copied to the map reduce cluster
+   * @param statusdir status dir location
+   * @param usehcatalog whether the command uses hcatalog/needs to connect
+   *         to hive metastore server
+   * @param completedUrl call back url
+   * @return list of arguments to pass to the templeton launcher job
+   * @throws BadParam
+   * @throws IOException
+   * @throws InterruptedException
+   */
   private List<String> makeArgs(String execute, String srcFile,
                   List<String> pigArgs, String otherFiles,
-                  String statusdir, String completedUrl, boolean enablelog)
+                  String statusdir, boolean usehcatalog,
+                  String completedUrl, boolean enablelog)
     throws BadParam, IOException, InterruptedException {
     ArrayList<String> args = new ArrayList<String>();
     try {
       ArrayList<String> allFiles = new ArrayList<String>();
-      if (TempletonUtils.isset(srcFile))
-        allFiles.add(TempletonUtils.hadoopFsFilename
-          (srcFile, appConf, runAs));
+      if (TempletonUtils.isset(srcFile)) {
+        allFiles.add(TempletonUtils.hadoopFsFilename(srcFile, appConf, runAs));
+      }
       if (TempletonUtils.isset(otherFiles)) {
         String[] ofs = TempletonUtils.hadoopFsListAsArray(otherFiles, appConf, runAs);
         allFiles.addAll(Arrays.asList(ofs));
@@ -85,6 +104,12 @@ public class PigDelegator extends Launch
       for (String pigArg : pigArgs) {
         args.add(TempletonUtils.quoteForWindows(pigArg));
       }
+      //check if the REST command specified explicitly to use hcatalog
+      // or if it says that implicitly using the pig -useHCatalog arg
+      if(usehcatalog || hasPigArgUseHcat(pigArgs)){
+        addHiveMetaStoreTokenArg();
+      }
+      
       if (TempletonUtils.isset(execute)) {
         args.add("-execute");
         args.add(TempletonUtils.quoteForWindows(execute));
@@ -101,4 +126,12 @@ public class PigDelegator extends Launch
 
     return args;
   }
+
+  /**
+   * Check if the pig arguments has -useHCatalog set
+   * see http://hive.apache.org/docs/hcat_r0.5.0/loadstore.pdf
+   */
+  private boolean hasPigArgUseHcat(List<String> pigArgs) {
+    return pigArgs.contains("-useHCatalog");
+  }
 }

Modified: hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/SecureProxySupport.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/SecureProxySupport.java?rev=1534903&r1=1534902&r2=1534903&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/SecureProxySupport.java (original)
+++ hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/SecureProxySupport.java Wed Oct 23 03:59:27 2013
@@ -33,10 +33,10 @@ import org.apache.hadoop.hive.conf.HiveC
 import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
 import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.apache.hadoop.io.Text;
-import org.apache.thrift.TException;
 import org.apache.hadoop.security.Credentials;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.token.Token;
+import org.apache.thrift.TException;
 
 /**
  * Helper class to run jobs using Kerberos security.  Always safe to
@@ -44,8 +44,8 @@ import org.apache.hadoop.security.token.
  */
 public class SecureProxySupport {
   private Path tokenPath;
-  private final String HCAT_SERVICE = "hcat";
-  private boolean isEnabled;
+  public static final String HCAT_SERVICE = "hcat";
+  private final boolean isEnabled;
   private String user;
 
   public SecureProxySupport() {

Modified: hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Server.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Server.java?rev=1534903&r1=1534902&r2=1534903&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Server.java (original)
+++ hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Server.java Wed Oct 23 03:59:27 2013
@@ -186,8 +186,9 @@ public class Server {
     verifyDdlParam(db, ":db");
 
     HcatDelegator d = new HcatDelegator(appConf, execService);
-    if (!TempletonUtils.isset(tablePattern))
+    if (!TempletonUtils.isset(tablePattern)) {
       tablePattern = "*";
+    }
     return d.listTables(getDoAsUser(), db, tablePattern);
   }
 
@@ -252,10 +253,12 @@ public class Server {
     verifyDdlParam(table, ":table");
 
     HcatDelegator d = new HcatDelegator(appConf, execService);
-    if ("extended".equals(format))
+    if ("extended".equals(format)) {
       return d.descExtendedTable(getDoAsUser(), db, table);
-    else
+    }
+    else {
       return d.descTable(getDoAsUser(), db, table, false);
+    }
   }
 
   /**
@@ -455,8 +458,9 @@ public class Server {
     verifyUser();
 
     HcatDelegator d = new HcatDelegator(appConf, execService);
-    if (!TempletonUtils.isset(dbPattern))
+    if (!TempletonUtils.isset(dbPattern)) {
       dbPattern = "*";
+    }
     return d.listDatabases(getDoAsUser(), dbPattern);
   }
 
@@ -508,8 +512,9 @@ public class Server {
     BadParam, ExecuteException, IOException {
     verifyUser();
     verifyDdlParam(db, ":db");
-    if (TempletonUtils.isset(option))
+    if (TempletonUtils.isset(option)) {
       verifyDdlParam(option, "option");
+    }
     HcatDelegator d = new HcatDelegator(appConf, execService);
     return d.dropDatabase(getDoAsUser(), db, ifExists, option,
         group, permissions);
@@ -579,6 +584,7 @@ public class Server {
 
   /**
    * Run a MapReduce Streaming job.
+   * @param callback URL which WebHCat will call when the streaming job finishes
    */
   @POST
   @Path("mapreduce/streaming")
@@ -628,6 +634,11 @@ public class Server {
 
   /**
    * Run a MapReduce Jar job.
+   * Params correspond to the REST api params
+   * @param  usehcatalog if {@code true}, means the Jar uses HCat and thus needs to access 
+   *    metastore, which requires additional steps for WebHCat to perform in a secure cluster.  
+   * @param callback URL which WebHCat will call when the job finishes
+   * @see org.apache.hive.hcatalog.templeton.tool.TempletonControllerJob
    */
   @POST
   @Path("mapreduce/jar")
@@ -640,6 +651,7 @@ public class Server {
                   @FormParam("define") List<String> defines,
                   @FormParam("statusdir") String statusdir,
                   @FormParam("callback") String callback,
+                  @FormParam("usehcatalog") boolean usehcatalog,
                   @FormParam("enablelog") boolean enablelog)
     throws NotAuthorizedException, BusyException, BadParam, QueueException,
     ExecuteException, IOException, InterruptedException {
@@ -665,11 +677,18 @@ public class Server {
     return d.run(getDoAsUser(), userArgs,
       jar, mainClass,
       libjars, files, args, defines,
-      statusdir, callback, getCompletedUrl(), enablelog, JobType.JAR);
+      statusdir, callback, usehcatalog, getCompletedUrl(), enablelog, JobType.JAR);
   }
 
   /**
    * Run a Pig job.
+   * Params correspond to the REST api params.  If '-useHCatalog' is in the {@code pigArgs},
+   * {@code usehcatalog} is interpreted as true.
+   * @param  usehcatalog if {@code true}, means the Pig script uses HCat and thus needs to access 
+   *    metastore, which requires additional steps for WebHCat to perform in a secure cluster.
+   *    This does nothing to ensure that Pig is installed on target node in the cluster.
+   * @param callback URL which WebHCat will call when the hive job finishes
+   * @see org.apache.hive.hcatalog.templeton.tool.TempletonControllerJob
    */
   @POST
   @Path("pig")
@@ -680,12 +699,14 @@ public class Server {
                @FormParam("files") String otherFiles,
                @FormParam("statusdir") String statusdir,
                @FormParam("callback") String callback,
+               @FormParam("usehcatalog") boolean usehcatalog,
                @FormParam("enablelog") boolean enablelog)
     throws NotAuthorizedException, BusyException, BadParam, QueueException,
     ExecuteException, IOException, InterruptedException {
     verifyUser();
-    if (execute == null && srcFile == null)
+    if (execute == null && srcFile == null) {
       throw new BadParam("Either execute or file parameter required");
+    }
     
     //add all function arguments to a map
     Map<String, Object> userArgs = new HashMap<String, Object>();
@@ -704,7 +725,7 @@ public class Server {
     return d.run(getDoAsUser(), userArgs,
       execute, srcFile,
       pigArgs, otherFiles,
-      statusdir, callback, getCompletedUrl(), enablelog);
+      statusdir, callback, usehcatalog, getCompletedUrl(), enablelog);
   }
 
   /**
@@ -719,7 +740,7 @@ public class Server {
    *                   used in "add jar" statement in hive script
    * @param defines    shortcut for command line arguments "--define"
    * @param statusdir  where the stderr/stdout of templeton controller job goes
-   * @param callback   callback url when the hive job finishes
+   * @param callback   URL which WebHCat will call when the hive job finishes
    * @param enablelog  whether to collect mapreduce log into statusdir/logs
    */
   @POST
@@ -736,8 +757,9 @@ public class Server {
     throws NotAuthorizedException, BusyException, BadParam, QueueException,
     ExecuteException, IOException, InterruptedException {
     verifyUser();
-    if (execute == null && srcFile == null)
+    if (execute == null && srcFile == null) {
       throw new BadParam("Either execute or file parameter required");
+    }
     
     //add all function arguments to a map
     Map<String, Object> userArgs = new HashMap<String, Object>();
@@ -874,10 +896,12 @@ public class Server {
   @GET
   @Path("internal/complete/{jobid}")
   @Produces({MediaType.APPLICATION_JSON})
-  public CompleteBean completeJob(@PathParam("jobid") String jobid)
+  public CompleteBean completeJob(@PathParam("jobid") String jobid,
+                                  @QueryParam("status") String jobStatus)
     throws CallbackFailedException, IOException {
+    LOG.debug("Received callback " + theUriInfo.getRequestUri());
     CompleteDelegator d = new CompleteDelegator(appConf);
-    return d.run(jobid);
+    return d.run(jobid, jobStatus);
   }
 
   /**
@@ -887,8 +911,9 @@ public class Server {
     String requestingUser = getRequestingUser();
     if (requestingUser == null) {
       String msg = "No user found.";
-      if (!UserGroupInformation.isSecurityEnabled())
+      if (!UserGroupInformation.isSecurityEnabled()) {
         msg += "  Missing " + PseudoAuthenticator.USER_NAME + " parameter.";
+      }
       throw new NotAuthorizedException(msg);
     }
     if(doAs != null && !doAs.equals(requestingUser)) {
@@ -897,9 +922,10 @@ public class Server {
       ProxyUserSupport.validate(requestingUser, getRequestingHost(requestingUser, request), doAs);
     }
   }
+
   /**
    * All 'tasks' spawned by WebHCat should be run as this user.  W/o doAs query parameter
-   * this is just the user making the request (or 
+   * this is just the user making the request (or
    * {@link org.apache.hadoop.security.authentication.client.PseudoAuthenticator#USER_NAME}
    * query param).
    * @return value of doAs query parameter or {@link #getRequestingUser()}
@@ -912,8 +938,9 @@ public class Server {
    */
   public void verifyParam(String param, String name)
     throws BadParam {
-    if (param == null)
+    if (param == null) {
       throw new BadParam("Missing " + name + " parameter");
+    }
   }
 
   /**
@@ -921,8 +948,9 @@ public class Server {
    */
   public void verifyParam(List<String> param, String name)
     throws BadParam {
-    if (param == null || param.isEmpty())
+    if (param == null || param.isEmpty()) {
       throw new BadParam("Missing " + name + " parameter");
+    }
   }
 
   public static final Pattern DDL_ID = Pattern.compile("[a-zA-Z]\\w*");
@@ -937,8 +965,9 @@ public class Server {
     throws BadParam {
     verifyParam(param, name);
     Matcher m = DDL_ID.matcher(param);
-    if (!m.matches())
+    if (!m.matches()) {
       throw new BadParam("Invalid DDL identifier " + name);
+    }
   }
   /**
    * Get the user name from the security context, i.e. the user making the HTTP request.
@@ -946,10 +975,12 @@ public class Server {
    * value of user.name query param, in kerberos mode it's the kinit'ed user.
    */
   private String getRequestingUser() {
-    if (theSecurityContext == null)
+    if (theSecurityContext == null) { 
       return null;
-    if (theSecurityContext.getUserPrincipal() == null)
+    }
+    if (theSecurityContext.getUserPrincipal() == null) {
       return null;
+    }
     //map hue/foo.bar@something.com->hue since user group checks 
     // and config files are in terms of short name
     return UserGroupInformation.createRemoteUser(
@@ -960,16 +991,18 @@ public class Server {
    * The callback url on this server when a task is completed.
    */
   public String getCompletedUrl() {
-    if (theUriInfo == null)
+    if (theUriInfo == null) {
       return null;
-    if (theUriInfo.getBaseUri() == null)
+    }
+    if (theUriInfo.getBaseUri() == null) {
       return null;
+    }
     return theUriInfo.getBaseUri() + VERSION
-      + "/internal/complete/$jobId";
+      + "/internal/complete/$jobId?status=$jobStatus";
   }
 
   /**
-   * Returns canonical host name from which the request is made; used for doAs validation  
+   * Returns canonical host name from which the request is made; used for doAs validation
    */
   private static String getRequestingHost(String requestingUser, HttpServletRequest request) {
     final String unkHost = "???";
@@ -998,7 +1031,7 @@ public class Server {
   }
   
   private void checkEnableLogPrerequisite(boolean enablelog, String statusdir) throws BadParam {
-    if (enablelog == true && !TempletonUtils.isset(statusdir))
+    if (enablelog && !TempletonUtils.isset(statusdir))
       throw new BadParam("enablelog is only applicable when statusdir is set");
   }
 }

Modified: hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/StreamingDelegator.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/StreamingDelegator.java?rev=1534903&r1=1534902&r2=1534903&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/StreamingDelegator.java (original)
+++ hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/StreamingDelegator.java Wed Oct 23 03:59:27 2013
@@ -58,7 +58,7 @@ public class StreamingDelegator extends 
     return d.run(user, userArgs,
       appConf.streamingJar(), null,
       null, files, args, defines,
-      statusdir, callback, completedUrl, enableLog, jobType);
+      statusdir, callback, false, completedUrl, enableLog, jobType);
   }
 
   private List<String> makeArgs(List<String> inputs,

Modified: hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TempletonControllerJob.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TempletonControllerJob.java?rev=1534903&r1=1534902&r2=1534903&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TempletonControllerJob.java (original)
+++ hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TempletonControllerJob.java Wed Oct 23 03:59:27 2013
@@ -25,6 +25,7 @@ import java.io.InputStreamReader;
 import java.io.OutputStream;
 import java.io.PrintWriter;
 import java.net.URISyntaxException;
+import java.security.PrivilegedExceptionAction;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Iterator;
@@ -41,6 +42,8 @@ import org.apache.hadoop.conf.Configurat
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
 import org.apache.hadoop.io.NullWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapred.JobClient;
@@ -55,9 +58,11 @@ import org.apache.hadoop.security.UserGr
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.util.Shell;
 import org.apache.hadoop.util.Tool;
-import org.apache.hadoop.util.ToolRunner;
 import org.apache.hive.hcatalog.templeton.BadParam;
 import org.apache.hive.hcatalog.templeton.LauncherDelegator;
+import org.apache.hive.hcatalog.templeton.SecureProxySupport;
+import org.apache.hive.hcatalog.templeton.UgiFactory;
+import org.apache.thrift.TException;
 
 /**
  * A Map Reduce job that will start another job.
@@ -70,6 +75,13 @@ import org.apache.hive.hcatalog.templeto
  * - run a keep alive thread so the job doesn't end.
  * - Optionally, store the stdout, stderr, and exit value of the child
  *   in hdfs files.
+ *   
+ * A note on security.  When jobs are submitted through WebHCat that use HCatalog, it means that
+ * metastore access is required.  Hive queries, of course, need metastore access.  This in turn
+ * requires delegation token to be obtained for metastore in a <em>secure cluster</em>.  Since we
+ * can't usually parse the job to find out if it is using metastore, we require 'usehcatalog'
+ * parameter supplied in the REST call.  WebHcat takes care of cancelling the token when the job
+ * is complete.
  */
 public class TempletonControllerJob extends Configured implements Tool {
   public static final String COPY_NAME = "templeton.copy";
@@ -89,12 +101,19 @@ public class TempletonControllerJob exte
   public static final String TOKEN_FILE_ARG_PLACEHOLDER 
     = "__WEBHCAT_TOKEN_FILE_LOCATION__";
 
-
   private static TrivialExecService execService = TrivialExecService.getInstance();
 
   private static final Log LOG = LogFactory.getLog(TempletonControllerJob.class);
+  private final boolean secureMetastoreAccess;
 
-
+  /**
+   * @param secureMetastoreAccess - if true, a delegation token will be created
+   *                              and added to the job
+   */
+  public TempletonControllerJob(boolean secureMetastoreAccess) {
+    super();
+    this.secureMetastoreAccess = secureMetastoreAccess;
+  }
   public static class LaunchMapper
     extends Mapper<NullWritable, NullWritable, Text, Text> {
     protected Process startJob(Context context, String user,
@@ -194,8 +213,9 @@ public class TempletonControllerJob exte
       proc.waitFor();
       keepAlive.sendReport = false;
       pool.shutdown();
-      if (!pool.awaitTermination(WATCHER_TIMEOUT_SECS, TimeUnit.SECONDS))
+      if (!pool.awaitTermination(WATCHER_TIMEOUT_SECS, TimeUnit.SECONDS)) {
         pool.shutdownNow();
+      }
 
       writeExitValue(conf, proc.exitValue(), statusdir);
       JobState state = new JobState(context.getJobID().toString(), conf);
@@ -210,11 +230,13 @@ public class TempletonControllerJob exte
         logRetriever.run();
       }
 
-      if (proc.exitValue() != 0)
+      if (proc.exitValue() != 0) {
         System.err.println("templeton: job failed with exit code "
           + proc.exitValue());
-      else
+      }
+      else {
         System.err.println("templeton: job completed with exit code 0");
+      }
     }
 
     private void executeWatcher(ExecutorService pool, Configuration conf,
@@ -248,10 +270,10 @@ public class TempletonControllerJob exte
   }
 
   private static class Watcher implements Runnable {
-    private InputStream in;
+    private final InputStream in;
     private OutputStream out;
-    private JobID jobid;
-    private Configuration conf;
+    private final JobID jobid;
+    private final Configuration conf;
 
     public Watcher(Configuration conf, JobID jobid, InputStream in,
              String statusdir, String name)
@@ -341,21 +363,26 @@ public class TempletonControllerJob exte
   private JobID submittedJobId;
 
   public String getSubmittedId() {
-    if (submittedJobId == null)
+    if (submittedJobId == null) {
       return null;
-    else
+    }
+    else {
       return submittedJobId.toString();
+    }
   }
 
   /**
    * Enqueue the job and print out the job id for later collection.
+   * @see org.apache.hive.hcatalog.templeton.CompleteDelegator
    */
   @Override
   public int run(String[] args)
-    throws IOException, InterruptedException, ClassNotFoundException {
+    throws IOException, InterruptedException, ClassNotFoundException, TException {
     Configuration conf = getConf();
+    
     conf.set(JAR_ARGS_NAME, TempletonUtils.encodeArray(args));
-    conf.set("user.name", UserGroupInformation.getCurrentUser().getShortUserName());
+    String user = UserGroupInformation.getCurrentUser().getShortUserName();
+    conf.set("user.name", user);
     Job job = new Job(conf);
     job.setJarByClass(TempletonControllerJob.class);
     job.setJobName("TempletonControllerJob");
@@ -363,8 +390,7 @@ public class TempletonControllerJob exte
     job.setMapOutputKeyClass(Text.class);
     job.setMapOutputValueClass(Text.class);
     job.setInputFormatClass(SingleInputFormat.class);
-    NullOutputFormat<NullWritable, NullWritable> of
-      = new NullOutputFormat<NullWritable, NullWritable>();
+    NullOutputFormat<NullWritable, NullWritable> of = new NullOutputFormat<NullWritable, NullWritable>();
     job.setOutputFormatClass(of.getClass());
     job.setNumReduceTasks(0);
 
@@ -372,18 +398,51 @@ public class TempletonControllerJob exte
 
     Token<DelegationTokenIdentifier> mrdt = jc.getDelegationToken(new Text("mr token"));
     job.getCredentials().addToken(new Text("mr token"), mrdt);
+
+    String metastoreTokenStrForm = addHMSToken(job, user);
+
     job.submit();
 
     submittedJobId = job.getJobID();
 
+    if(metastoreTokenStrForm != null) {
+      //so that it can be cancelled later from CompleteDelegator
+      DelegationTokenCache.getStringFormTokenCache().storeDelegationToken(
+              submittedJobId.toString(), metastoreTokenStrForm);
+      LOG.debug("Added metastore delegation token for jobId=" + submittedJobId.toString() + " " +
+              "user=" + user);
+    }
     return 0;
   }
-
-
-  public static void main(String[] args) throws Exception {
-    int ret = ToolRunner.run(new TempletonControllerJob(), args);
-    if (ret != 0)
-      System.err.println("TempletonControllerJob failed!");
-    System.exit(ret);
+  private String addHMSToken(Job job, String user) throws IOException, InterruptedException,
+          TException {
+    if(!secureMetastoreAccess) {
+      return null;
+    }
+    Token<org.apache.hadoop.hive.thrift.DelegationTokenIdentifier> hiveToken =
+            new Token<org.apache.hadoop.hive.thrift.DelegationTokenIdentifier>();
+    String metastoreTokenStrForm = buildHcatDelegationToken(user);
+    hiveToken.decodeFromUrlString(metastoreTokenStrForm);
+    job.getCredentials().addToken(new
+            Text(SecureProxySupport.HCAT_SERVICE), hiveToken);
+    return metastoreTokenStrForm;
+  }
+  private String buildHcatDelegationToken(String user) throws IOException, InterruptedException,
+          TException {
+    final HiveConf c = new HiveConf();
+    LOG.debug("Creating hive metastore delegation token for user " + user);
+    final UserGroupInformation ugi = UgiFactory.getUgi(user);
+    UserGroupInformation real = ugi.getRealUser();
+    return real.doAs(new PrivilegedExceptionAction<String>() {
+      public String run() throws IOException, TException, InterruptedException  {
+        final HiveMetaStoreClient client = new HiveMetaStoreClient(c);
+        return ugi.doAs(new PrivilegedExceptionAction<String>() {
+          public String run() throws IOException, TException, InterruptedException {
+            String u = ugi.getUserName();
+            return client.getDelegationToken(u);
+          }
+        });
+      }
+    });
   }
 }

Modified: hive/branches/tez/hwi/ivy.xml
URL: http://svn.apache.org/viewvc/hive/branches/tez/hwi/ivy.xml?rev=1534903&r1=1534902&r2=1534903&view=diff
==============================================================================
--- hive/branches/tez/hwi/ivy.xml (original)
+++ hive/branches/tez/hwi/ivy.xml Wed Oct 23 03:59:27 2013
@@ -29,6 +29,8 @@
     <dependency org="org.apache.hive" name="hive-cli" rev="${version}"
                 conf="compile->default" />
     <dependency org="org.mortbay.jetty" name="jetty" rev="${jetty.version}" />
+    <dependency org="org.apache.ant" name="ant" rev="${apacheant.version}" />
+    <dependency org="org.apache.ant" name="ant-launcher" rev="${apacheant.version}" />
 
     <!-- Test Dependencies -->
     <dependency org="commons-httpclient" name="commons-httpclient" rev="${commons-httpclient.version}"

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeGenericFuncEvaluator.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeGenericFuncEvaluator.java?rev=1534903&r1=1534902&r2=1534903&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeGenericFuncEvaluator.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeGenericFuncEvaluator.java Wed Oct 23 03:59:27 2013
@@ -83,10 +83,10 @@ public class ExprNodeGenericFuncEvaluato
 
   public ExprNodeGenericFuncEvaluator(ExprNodeGenericFuncDesc expr) throws HiveException {
     super(expr);
-    children = new ExprNodeEvaluator[expr.getChildExprs().size()];
+    children = new ExprNodeEvaluator[expr.getChildren().size()];
     isEager = false;
     for (int i = 0; i < children.length; i++) {
-      ExprNodeDesc child = expr.getChildExprs().get(i);
+      ExprNodeDesc child = expr.getChildren().get(i);
       ExprNodeEvaluator nodeEvaluator = ExprNodeEvaluatorFactory.get(child);
       children[i] = nodeEvaluator;
       // If we have eager evaluators anywhere below us, then we are eager too.

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java?rev=1534903&r1=1534902&r2=1534903&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java Wed Oct 23 03:59:27 2013
@@ -1114,7 +1114,7 @@ public class FileSinkOperator extends Te
       }
     }
     String keyPrefix = Utilities.getHashedStatsPrefix(
-        conf.getStatsAggPrefix() + spSpec + newFspKey + Path.SEPARATOR,
+        conf.getStatsAggPrefix() + spSpec + newFspKey,
         conf.getMaxStatsKeyPrefixLength());
     key = keyPrefix + storedAsDirPostFix + taskID;
     return key;

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/TableScanOperator.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/TableScanOperator.java?rev=1534903&r1=1534902&r2=1534903&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/TableScanOperator.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/TableScanOperator.java Wed Oct 23 03:59:27 2013
@@ -301,7 +301,7 @@ public class TableScanOperator extends O
         // In case of a partition, the key for temp storage is
         // "tableName + partitionSpecs + taskID"
         String keyPrefix = Utilities.getHashedStatsPrefix(
-            conf.getStatsAggPrefix() + pspecs + Path.SEPARATOR, conf.getMaxStatsKeyPrefixLength());
+            conf.getStatsAggPrefix() + pspecs, conf.getMaxStatsKeyPrefixLength());
         key = keyPrefix + taskID;
       }
       for(String statType : stats.get(pspecs).getStoredStats()) {

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java?rev=1534903&r1=1534902&r2=1534903&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java Wed Oct 23 03:59:27 2013
@@ -133,6 +133,7 @@ import org.apache.hadoop.hive.ql.parse.S
 import org.apache.hadoop.hive.ql.plan.BaseWork;
 import org.apache.hadoop.hive.ql.plan.DynamicPartitionCtx;
 import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
 import org.apache.hadoop.hive.ql.plan.FileSinkDesc;
 import org.apache.hadoop.hive.ql.plan.GroupByDesc;
 import org.apache.hadoop.hive.ql.plan.MapWork;
@@ -623,10 +624,10 @@ public final class Utilities {
    * @param expr Expression.
    * @return Bytes.
    */
-  public static byte[] serializeExpressionToKryo(ExprNodeDesc expr) {
+  public static byte[] serializeExpressionToKryo(ExprNodeGenericFuncDesc expr) {
     ByteArrayOutputStream baos = new ByteArrayOutputStream();
     Output output = new Output(baos);
-    runtimeSerializationKryo.get().writeClassAndObject(output, expr);
+    runtimeSerializationKryo.get().writeObject(output, expr);
     output.close();
     return baos.toByteArray();
   }
@@ -636,47 +637,30 @@ public final class Utilities {
    * @param bytes Bytes containing the expression.
    * @return Expression; null if deserialization succeeded, but the result type is incorrect.
    */
-  public static ExprNodeDesc deserializeExpressionFromKryo(byte[] bytes) {
+  public static ExprNodeGenericFuncDesc deserializeExpressionFromKryo(byte[] bytes) {
     Input inp = new Input(new ByteArrayInputStream(bytes));
-    Object o = runtimeSerializationKryo.get().readClassAndObject(inp);
+    ExprNodeGenericFuncDesc func = runtimeSerializationKryo.get().
+      readObject(inp,ExprNodeGenericFuncDesc.class);
     inp.close();
-    return (o instanceof ExprNodeDesc) ? (ExprNodeDesc)o : null;
+    return func;
   }
 
-  public static String serializeExpression(ExprNodeDesc expr) {
-    ByteArrayOutputStream baos = new ByteArrayOutputStream();
-    XMLEncoder encoder = new XMLEncoder(baos);
-    encoder.setPersistenceDelegate(java.sql.Date.class, new DatePersistenceDelegate());
-    encoder.setPersistenceDelegate(Timestamp.class, new TimestampPersistenceDelegate());
-    try {
-      encoder.writeObject(expr);
-    } finally {
-      encoder.close();
-    }
+  public static String serializeExpression(ExprNodeGenericFuncDesc expr) {
     try {
-      return baos.toString("UTF-8");
+      return new String(Base64.encodeBase64(serializeExpressionToKryo(expr)), "UTF-8");
     } catch (UnsupportedEncodingException ex) {
       throw new RuntimeException("UTF-8 support required", ex);
     }
   }
 
-  public static ExprNodeDesc deserializeExpression(String s, Configuration conf) {
+  public static ExprNodeGenericFuncDesc deserializeExpression(String s) {
     byte[] bytes;
     try {
-      bytes = s.getBytes("UTF-8");
+      bytes = Base64.decodeBase64(s.getBytes("UTF-8"));
     } catch (UnsupportedEncodingException ex) {
       throw new RuntimeException("UTF-8 support required", ex);
     }
-
-    ByteArrayInputStream bais = new ByteArrayInputStream(bytes);
-
-    XMLDecoder decoder = new XMLDecoder(bais, null, null);
-    try {
-      ExprNodeDesc expr = (ExprNodeDesc) decoder.readObject();
-      return expr;
-    } finally {
-      decoder.close();
-    }
+    return deserializeExpressionFromKryo(bytes);
   }
 
   public static class CollectionPersistenceDelegate extends DefaultPersistenceDelegate {
@@ -694,6 +678,26 @@ public final class Utilities {
     }
   }
 
+  /**
+   * Kryo serializer for timestamp.
+   */
+  private static class TimestampSerializer extends
+  com.esotericsoftware.kryo.Serializer<Timestamp> {
+
+    @Override
+    public Timestamp read(Kryo kryo, Input input, Class<Timestamp> clazz) {
+      Timestamp ts = new Timestamp(input.readLong());
+      ts.setNanos(input.readInt());
+      return ts;
+    }
+
+    @Override
+    public void write(Kryo kryo, Output output, Timestamp ts) {
+      output.writeLong(ts.getTime());
+      output.writeInt(ts.getNanos());
+    }
+  }
+
    /** Custom Kryo serializer for sql date, otherwise Kryo gets confused between
    java.sql.Date and java.util.Date while deserializing
    */
@@ -864,6 +868,7 @@ public final class Utilities {
       Kryo kryo = new Kryo();
       kryo.setClassLoader(Thread.currentThread().getContextClassLoader());
       kryo.register(java.sql.Date.class, new SqlDateSerializer());
+      kryo.register(java.sql.Timestamp.class, new TimestampSerializer());
       removeField(kryo, Operator.class, "colExprMap");
       removeField(kryo, ColumnInfo.class, "objectInspector");
       removeField(kryo, MapWork.class, "opParseCtxMap");
@@ -884,6 +889,7 @@ public final class Utilities {
       kryo.setClassLoader(Thread.currentThread().getContextClassLoader());
       kryo.register(CommonToken.class, new CommonTokenSerializer());
       kryo.register(java.sql.Date.class, new SqlDateSerializer());
+      kryo.register(java.sql.Timestamp.class, new TimestampSerializer());
       return kryo;
     };
   };
@@ -2336,11 +2342,11 @@ public final class Utilities {
    * @return
    */
   public static String getHashedStatsPrefix(String statsPrefix, int maxPrefixLength) {
-    String ret = statsPrefix;
+    String ret = appendPathSeparator(statsPrefix);
     if (maxPrefixLength >= 0 && statsPrefix.length() > maxPrefixLength) {
       try {
         MessageDigest digester = MessageDigest.getInstance("MD5");
-        digester.update(statsPrefix.getBytes());
+        digester.update(ret.getBytes());
         ret = new String(digester.digest()) + Path.SEPARATOR;
       } catch (NoSuchAlgorithmException e) {
         throw new RuntimeException(e);
@@ -2349,6 +2355,13 @@ public final class Utilities {
     return ret;
   }
 
+  private static String appendPathSeparator(String path) {
+    if (!path.endsWith(Path.SEPARATOR)) {
+      path = path + Path.SEPARATOR;
+    }
+    return path;
+  }
+
   public static void setColumnNameList(JobConf jobConf, Operator op) {
     RowSchema rowSchema = op.getSchema();
     if (rowSchema == null) {

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorMapJoinOperator.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorMapJoinOperator.java?rev=1534903&r1=1534902&r2=1534903&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorMapJoinOperator.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorMapJoinOperator.java Wed Oct 23 03:59:27 2013
@@ -116,10 +116,14 @@ public class VectorMapJoinOperator exten
 
     List<String> outColNames = desc.getOutputColumnNames();
     int outputColumnIndex = 0;
+
+    Map<String, Integer> cMap = vContext.getColumnMap();
     for(byte alias:order) {
       for(ExprNodeDesc expr: exprs.get(alias)) {
         String columnName = outColNames.get(outputColumnIndex);
-        vContext.addOutputColumn(columnName, expr.getTypeString());
+        if (!cMap.containsKey(columnName)) {
+          vContext.addOutputColumn(columnName, expr.getTypeString());
+        }
         ++outputColumnIndex;
       }
     }

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java?rev=1534903&r1=1534902&r2=1534903&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java Wed Oct 23 03:59:27 2013
@@ -321,7 +321,7 @@ public class VectorizationContext {
         ve = getCustomUDFExpression(expr);
       } else {
         ve = getVectorExpression(expr.getGenericUDF(),
-            expr.getChildExprs());
+            expr.getChildren());
       }
     } else if (exprDesc instanceof ExprNodeConstantDesc) {
       ve = getConstantVectorExpression((ExprNodeConstantDesc) exprDesc);
@@ -376,7 +376,7 @@ public class VectorizationContext {
 
   // Return the type string of the first argument (argument 0).
   public static String arg0Type(ExprNodeGenericFuncDesc expr) {
-    String type = expr.getChildExprs().get(0).getTypeString();
+    String type = expr.getChildren().get(0).getTypeString();
     return type;
   }
 
@@ -1109,10 +1109,10 @@ public class VectorizationContext {
       throws HiveException {
 
     //GenericUDFBridge udfBridge = (GenericUDFBridge) expr.getGenericUDF();
-    List<ExprNodeDesc> childExprList = expr.getChildExprs();
+    List<ExprNodeDesc> childExprList = expr.getChildren();
 
     // argument descriptors
-    VectorUDFArgDesc[] argDescs = new VectorUDFArgDesc[expr.getChildExprs().size()];
+    VectorUDFArgDesc[] argDescs = new VectorUDFArgDesc[expr.getChildren().size()];
     for (int i = 0; i < argDescs.length; i++) {
       argDescs[i] = new VectorUDFArgDesc();
     }

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/udf/VectorUDFAdaptor.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/udf/VectorUDFAdaptor.java?rev=1534903&r1=1534902&r2=1534903&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/udf/VectorUDFAdaptor.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/udf/VectorUDFAdaptor.java Wed Oct 23 03:59:27 2013
@@ -92,9 +92,9 @@ public class VectorUDFAdaptor extends Ve
   // Initialize transient fields. To be called after deserialization of other fields.
   public void init() throws HiveException, UDFArgumentException {
     genericUDF = expr.getGenericUDF();
-    deferredChildren = new GenericUDF.DeferredObject[expr.getChildExprs().size()];
-    childrenOIs = new ObjectInspector[expr.getChildExprs().size()];
-    writers = VectorExpressionWriterFactory.getExpressionWriters(expr.getChildExprs());
+    deferredChildren = new GenericUDF.DeferredObject[expr.getChildren().size()];
+    childrenOIs = new ObjectInspector[expr.getChildren().size()];
+    writers = VectorExpressionWriterFactory.getExpressionWriters(expr.getChildren());
     for (int i = 0; i < childrenOIs.length; i++) {
       childrenOIs[i] = writers[i].getObjectInspector();
     }

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/index/IndexPredicateAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/index/IndexPredicateAnalyzer.java?rev=1534903&r1=1534902&r2=1534903&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/index/IndexPredicateAnalyzer.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/index/IndexPredicateAnalyzer.java Wed Oct 23 03:59:27 2013
@@ -26,6 +26,8 @@ import java.util.Map;
 import java.util.Set;
 import java.util.Stack;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator;
 import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluatorFactory;
 import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
@@ -44,12 +46,10 @@ import org.apache.hadoop.hive.ql.plan.Ex
 import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFBridge;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDFBridge;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.util.ReflectionUtils;
 
 /**
@@ -62,7 +62,7 @@ import org.apache.hadoop.util.Reflection
 public class IndexPredicateAnalyzer
 {
   private static final Log LOG = LogFactory.getLog(IndexPredicateAnalyzer.class.getName());
-  private Set<String> udfNames;
+  private final Set<String> udfNames;
 
   private Set<String> allowedColumnNames;
 
@@ -135,7 +135,7 @@ public class IndexPredicateAnalyzer
           }
         }
 
-        return analyzeExpr((ExprNodeDesc) nd, searchConditions, nodeOutputs);
+        return analyzeExpr((ExprNodeGenericFuncDesc) nd, searchConditions, nodeOutputs);
       }
     };
 
@@ -155,13 +155,11 @@ public class IndexPredicateAnalyzer
   }
 
   private ExprNodeDesc analyzeExpr(
-    ExprNodeDesc expr,
+    ExprNodeGenericFuncDesc expr,
     List<IndexSearchCondition> searchConditions,
     Object... nodeOutputs) {
 
-    if (!(expr instanceof ExprNodeGenericFuncDesc)) {
-      return expr;
-    }
+    expr = (ExprNodeGenericFuncDesc) expr;
     if (FunctionRegistry.isOpAnd(expr)) {
       assert(nodeOutputs.length == 2);
       ExprNodeDesc residual1 = (ExprNodeDesc) nodeOutputs[0];
@@ -182,12 +180,11 @@ public class IndexPredicateAnalyzer
     }
 
     String udfName;
-    ExprNodeGenericFuncDesc funcDesc = (ExprNodeGenericFuncDesc) expr;
-    if (funcDesc.getGenericUDF() instanceof GenericUDFBridge) {
-      GenericUDFBridge func = (GenericUDFBridge) funcDesc.getGenericUDF();
+    if (expr.getGenericUDF() instanceof GenericUDFBridge) {
+      GenericUDFBridge func = (GenericUDFBridge) expr.getGenericUDF();
       udfName = func.getUdfName();
     } else {
-      udfName = funcDesc.getGenericUDF().getClass().getName();
+      udfName = expr.getGenericUDF().getClass().getName();
     }
     if (!udfNames.contains(udfName)) {
       return expr;
@@ -255,7 +252,7 @@ public class IndexPredicateAnalyzer
         }
       }
 
-      for (ExprNodeDesc child : func.getChildExprs()) {
+      for (ExprNodeDesc child : func.getChildren()) {
         if (child instanceof ExprNodeConstantDesc) {
           continue;
         } else if (child instanceof ExprNodeGenericFuncDesc) {
@@ -283,12 +280,12 @@ public class IndexPredicateAnalyzer
    *
    * @param searchConditions (typically produced by analyzePredicate)
    *
-   * @return ExprNodeDesc form of search conditions
+   * @return ExprNodeGenericFuncDesc form of search conditions
    */
-  public ExprNodeDesc translateSearchConditions(
+  public ExprNodeGenericFuncDesc translateSearchConditions(
     List<IndexSearchCondition> searchConditions) {
 
-    ExprNodeDesc expr = null;
+    ExprNodeGenericFuncDesc expr = null;
     for (IndexSearchCondition searchCondition : searchConditions) {
       if (expr == null) {
         expr = searchCondition.getComparisonExpr();

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/index/IndexSearchCondition.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/index/IndexSearchCondition.java?rev=1534903&r1=1534902&r2=1534903&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/index/IndexSearchCondition.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/index/IndexSearchCondition.java Wed Oct 23 03:59:27 2013
@@ -19,7 +19,7 @@ package org.apache.hadoop.hive.ql.index;
 
 import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
-import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
 
 /**
  * IndexSearchCondition represents an individual search condition
@@ -31,7 +31,7 @@ public class IndexSearchCondition
   private ExprNodeColumnDesc columnDesc;
   private String comparisonOp;
   private ExprNodeConstantDesc constantDesc;
-  private ExprNodeDesc comparisonExpr;
+  private ExprNodeGenericFuncDesc comparisonExpr;
 
   /**
    * Constructs a search condition, which takes the form
@@ -50,7 +50,7 @@ public class IndexSearchCondition
     ExprNodeColumnDesc columnDesc,
     String comparisonOp,
     ExprNodeConstantDesc constantDesc,
-    ExprNodeDesc comparisonExpr) {
+    ExprNodeGenericFuncDesc comparisonExpr) {
 
     this.columnDesc = columnDesc;
     this.comparisonOp = comparisonOp;
@@ -82,11 +82,11 @@ public class IndexSearchCondition
     return constantDesc;
   }
 
-  public void setComparisonExpr(ExprNodeDesc comparisonExpr) {
+  public void setComparisonExpr(ExprNodeGenericFuncDesc comparisonExpr) {
     this.comparisonExpr = comparisonExpr;
   }
 
-  public ExprNodeDesc getComparisonExpr() {
+  public ExprNodeGenericFuncDesc getComparisonExpr() {
     return comparisonExpr;
   }
 

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/index/compact/CompactIndexHandler.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/index/compact/CompactIndexHandler.java?rev=1534903&r1=1534902&r2=1534903&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/index/compact/CompactIndexHandler.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/index/compact/CompactIndexHandler.java Wed Oct 23 03:59:27 2013
@@ -316,7 +316,8 @@ public class CompactIndexHandler extends
     IndexPredicateAnalyzer analyzer = getIndexPredicateAnalyzer(index, queryPartitions);
     List<IndexSearchCondition> searchConditions = new ArrayList<IndexSearchCondition>();
     // split predicate into pushed (what we can handle), and residual (what we can't handle)
-    ExprNodeDesc residualPredicate = analyzer.analyzePredicate(predicate, searchConditions);
+    ExprNodeGenericFuncDesc residualPredicate = (ExprNodeGenericFuncDesc)analyzer.
+      analyzePredicate(predicate, searchConditions);
 
     if (searchConditions.size() == 0) {
       return null;

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java?rev=1534903&r1=1534902&r2=1534903&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java Wed Oct 23 03:59:27 2013
@@ -38,7 +38,7 @@ import org.apache.hadoop.hive.ql.exec.Op
 import org.apache.hadoop.hive.ql.exec.TableScanOperator;
 import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.log.PerfLogger;
-import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
 import org.apache.hadoop.hive.ql.plan.MapWork;
 import org.apache.hadoop.hive.ql.plan.OperatorDesc;
 import org.apache.hadoop.hive.ql.plan.PartitionDesc;
@@ -389,7 +389,7 @@ public class HiveInputFormat<K extends W
     Utilities.setColumnNameList(jobConf, tableScan);
     Utilities.setColumnTypeList(jobConf, tableScan);
     // push down filters
-    ExprNodeDesc filterExpr = scanDesc.getFilterExpr();
+    ExprNodeGenericFuncDesc filterExpr = (ExprNodeGenericFuncDesc)scanDesc.getFilterExpr();
     if (filterExpr == null) {
       return;
     }