Posted to commits@hive.apache.org by gu...@apache.org on 2014/09/23 22:00:11 UTC

svn commit: r1627140 [1/3] - in /hive/trunk: ./ common/src/java/org/apache/hadoop/hive/conf/ contrib/src/java/org/apache/hadoop/hive/contrib/udaf/example/ contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/ contrib/src/java/org/apache/hadoop/h...

Author: gunther
Date: Tue Sep 23 20:00:10 2014
New Revision: 1627140

URL: http://svn.apache.org/r1627140
Log:
HIVE-7946: Merge CBO changes to Trunk (Laljo John Pullokkaran, Ashutosh Chauhan, Harish Butani, Sergey Shelukhin, Pengcheng Xiong, Julian Hyde via Gunther Hagleitner)

Added:
    hive/trunk/data/files/cbo_t1.txt
      - copied unchanged from r1627125, hive/branches/cbo/data/files/cbo_t1.txt
    hive/trunk/data/files/cbo_t2.txt
      - copied unchanged from r1627125, hive/branches/cbo/data/files/cbo_t2.txt
    hive/trunk/data/files/cbo_t3.txt
      - copied unchanged from r1627125, hive/branches/cbo/data/files/cbo_t3.txt
    hive/trunk/data/files/cbo_t4.txt
      - copied unchanged from r1627125, hive/branches/cbo/data/files/cbo_t4.txt
    hive/trunk/data/files/cbo_t5.txt
      - copied unchanged from r1627125, hive/branches/cbo/data/files/cbo_t5.txt
    hive/trunk/data/files/cbo_t6.txt
      - copied unchanged from r1627125, hive/branches/cbo/data/files/cbo_t6.txt
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/
      - copied from r1627125, hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/
    hive/trunk/ql/src/test/queries/clientpositive/cbo_correctness.q
      - copied unchanged from r1627125, hive/branches/cbo/ql/src/test/queries/clientpositive/cbo_correctness.q
    hive/trunk/ql/src/test/results/clientpositive/cbo_correctness.q.out
      - copied unchanged from r1627125, hive/branches/cbo/ql/src/test/results/clientpositive/cbo_correctness.q.out
    hive/trunk/ql/src/test/results/clientpositive/tez/cbo_correctness.q.out
      - copied unchanged from r1627125, hive/branches/cbo/ql/src/test/results/clientpositive/tez/cbo_correctness.q.out
Modified:
    hive/trunk/   (props changed)
    hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
    hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udaf/example/UDAFExampleAvg.java
    hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udaf/example/UDAFExampleGroupConcat.java
    hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udaf/example/UDAFExampleMaxN.java
    hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udaf/example/UDAFExampleMinN.java
    hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleAdd.java
    hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleArraySum.java
    hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleFormat.java
    hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleMapConcat.java
    hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleStructPrint.java
    hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udtf/example/GenericUDTFCount2.java
    hive/trunk/hbase-handler/pom.xml   (props changed)
    hive/trunk/itests/src/test/resources/testconfiguration.properties
    hive/trunk/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDAFTestMax.java
    hive/trunk/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDFFileLookup.java
    hive/trunk/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDFTestErrorOnFalse.java
    hive/trunk/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDFTestLength.java
    hive/trunk/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDFTestLength2.java
    hive/trunk/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/DummyContextUDF.java
    hive/trunk/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestGetJavaBoolean.java
    hive/trunk/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestGetJavaString.java
    hive/trunk/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestTranslate.java
    hive/trunk/ql/pom.xml
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/QueryProperties.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionPruner.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsSemanticAnalyzer.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseDriver.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseUtils.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/RowResolver.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckCtx.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeConstantDesc.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeDescUtils.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/MapJoinDesc.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsUtils.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFFromUtcTimestamp.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFIf.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTimestamp.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToUtcTimestamp.java
    hive/trunk/ql/src/test/queries/clientpositive/create_func1.q
    hive/trunk/ql/src/test/results/clientpositive/create_func1.q.out
    hive/trunk/ql/src/test/results/clientpositive/tez/auto_join0.q.out
    hive/trunk/ql/src/test/results/clientpositive/tez/auto_join1.q.out
    hive/trunk/ql/src/test/results/clientpositive/tez/bucket_map_join_tez1.q.out
    hive/trunk/ql/src/test/results/clientpositive/tez/bucket_map_join_tez2.q.out
    hive/trunk/ql/src/test/results/clientpositive/tez/correlationoptimizer1.q.out
    hive/trunk/ql/src/test/results/clientpositive/tez/cross_product_check_2.q.out
    hive/trunk/ql/src/test/results/clientpositive/tez/dynamic_partition_pruning.q.out
    hive/trunk/ql/src/test/results/clientpositive/tez/dynamic_partition_pruning_2.q.out
    hive/trunk/ql/src/test/results/clientpositive/tez/mapjoin_decimal.q.out
    hive/trunk/ql/src/test/results/clientpositive/tez/mapjoin_mapjoin.q.out
    hive/trunk/ql/src/test/results/clientpositive/tez/mrr.q.out
    hive/trunk/ql/src/test/results/clientpositive/tez/tez_bmj_schema_evolution.q.out
    hive/trunk/ql/src/test/results/clientpositive/tez/tez_union.q.out
    hive/trunk/ql/src/test/results/clientpositive/tez/vector_left_outer_join.q.out
    hive/trunk/ql/src/test/results/clientpositive/tez/vector_mapjoin_reduce.q.out
    hive/trunk/ql/src/test/results/clientpositive/tez/vectorized_mapjoin.q.out
    hive/trunk/ql/src/test/results/clientpositive/tez/vectorized_nested_mapjoin.q.out
    hive/trunk/ql/src/test/results/clientpositive/udf_if.q.out

Propchange: hive/trunk/
------------------------------------------------------------------------------
  Merged /hive/branches/cbo:r1605012-1627125

Modified: hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
URL: http://svn.apache.org/viewvc/hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java?rev=1627140&r1=1627139&r2=1627140&view=diff
==============================================================================
--- hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java (original)
+++ hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java Tue Sep 23 20:00:10 2014
@@ -640,6 +640,9 @@ public class HiveConf extends Configurat
     HIVEJOINCACHESIZE("hive.join.cache.size", 25000,
         "How many rows in the joining tables (except the streaming table) should be cached in memory."),
 
+    // CBO related
+    HIVE_CBO_ENABLED("hive.cbo.enable", false, "Flag to control enabling Cost Based Optimizations using the Optiq framework."),
+
     // hive.mapjoin.bucket.cache.size has been replaced by hive.smbjoin.cache.row,
     // need to remove by hive .13. Also, do not change default (see SMB operator)
     HIVEMAPJOINBUCKETCACHESIZE("hive.mapjoin.bucket.cache.size", 100, ""),
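
The new flag defaults to false, so the cost-based path must be enabled explicitly. Below is a minimal sketch of toggling it through the HiveConf API; the ConfVars constant is taken from the hunk above, while the wrapper class is illustrative only:

    import org.apache.hadoop.hive.conf.HiveConf;

    public class CboToggle {
      public static void main(String[] args) {
        HiveConf conf = new HiveConf();
        // hive.cbo.enable defaults to false per the hunk above.
        conf.setBoolVar(HiveConf.ConfVars.HIVE_CBO_ENABLED, true);
        System.out.println(conf.getBoolVar(HiveConf.ConfVars.HIVE_CBO_ENABLED)); // true
      }
    }

The same property can also be flipped per session with "set hive.cbo.enable=true;" from the CLI.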

Modified: hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udaf/example/UDAFExampleAvg.java
URL: http://svn.apache.org/viewvc/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udaf/example/UDAFExampleAvg.java?rev=1627140&r1=1627139&r2=1627140&view=diff
==============================================================================
--- hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udaf/example/UDAFExampleAvg.java (original)
+++ hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udaf/example/UDAFExampleAvg.java Tue Sep 23 20:00:10 2014
@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.hive.contrib.udaf.example;
 
+import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDAF;
 import org.apache.hadoop.hive.ql.exec.UDAFEvaluator;
 
@@ -32,6 +33,8 @@ import org.apache.hadoop.hive.ql.exec.UD
  * more efficient.
  * 
  */
+@Description(name = "example_avg",
+value = "_FUNC_(col) - Example UDAF to compute average")
 public final class UDAFExampleAvg extends UDAF {
 
   /**

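The @Description annotations added throughout this commit carry the metadata that DESCRIBE FUNCTION prints. A hedged sketch of reading one back reflectively; the class and annotation come from the hunks in this commit, the printing itself is illustrative:

    import org.apache.hadoop.hive.contrib.udaf.example.UDAFExampleAvg;
    import org.apache.hadoop.hive.ql.exec.Description;

    public class DescribeSketch {
      public static void main(String[] args) {
        Description d = UDAFExampleAvg.class.getAnnotation(Description.class);
        // _FUNC_ is a placeholder that Hive substitutes with the registered name.
        System.out.println(d.value().replace("_FUNC_", d.name()));
        // prints: example_avg(col) - Example UDAF to compute average
      }
    }
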
Modified: hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udaf/example/UDAFExampleGroupConcat.java
URL: http://svn.apache.org/viewvc/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udaf/example/UDAFExampleGroupConcat.java?rev=1627140&r1=1627139&r2=1627140&view=diff
==============================================================================
--- hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udaf/example/UDAFExampleGroupConcat.java (original)
+++ hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udaf/example/UDAFExampleGroupConcat.java Tue Sep 23 20:00:10 2014
@@ -21,6 +21,7 @@ package org.apache.hadoop.hive.contrib.u
 import java.util.ArrayList;
 import java.util.Collections;
 
+import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDAF;
 import org.apache.hadoop.hive.ql.exec.UDAFEvaluator;
 
@@ -35,6 +36,8 @@ import org.apache.hadoop.hive.ql.exec.UD
  * implement built-in aggregation functions, which are harder to program but
  * more efficient.
  */
+@Description(name = "example_group_concat",
+value = "_FUNC_(col) - Example UDAF that concatenates all arguments from different rows into a single string")
 public class UDAFExampleGroupConcat extends UDAF {
 
   /**

Modified: hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udaf/example/UDAFExampleMaxN.java
URL: http://svn.apache.org/viewvc/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udaf/example/UDAFExampleMaxN.java?rev=1627140&r1=1627139&r2=1627140&view=diff
==============================================================================
--- hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udaf/example/UDAFExampleMaxN.java (original)
+++ hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udaf/example/UDAFExampleMaxN.java Tue Sep 23 20:00:10 2014
@@ -19,11 +19,13 @@
 
 package org.apache.hadoop.hive.contrib.udaf.example;
 
+import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDAF;
 
 /**
  * Returns the max N double values.
  */
+@Description(name = "example_max_n", value = "_FUNC_(expr) - Example UDAF that returns the max N double values")
 public class UDAFExampleMaxN extends UDAF {
 
   /**

Modified: hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udaf/example/UDAFExampleMinN.java
URL: http://svn.apache.org/viewvc/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udaf/example/UDAFExampleMinN.java?rev=1627140&r1=1627139&r2=1627140&view=diff
==============================================================================
--- hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udaf/example/UDAFExampleMinN.java (original)
+++ hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udaf/example/UDAFExampleMinN.java Tue Sep 23 20:00:10 2014
@@ -19,11 +19,13 @@
 
 package org.apache.hadoop.hive.contrib.udaf.example;
 
+import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDAF;
 
 /**
  * Returns the min N double values.
  */
+@Description(name = "example_min_n", value = "_FUNC_(expr) - Example UDAF that returns the min N double values")
 public class UDAFExampleMinN extends UDAF{
 
   /**

Modified: hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleAdd.java
URL: http://svn.apache.org/viewvc/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleAdd.java?rev=1627140&r1=1627139&r2=1627140&view=diff
==============================================================================
--- hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleAdd.java (original)
+++ hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleAdd.java Tue Sep 23 20:00:10 2014
@@ -17,12 +17,14 @@
  */
 package org.apache.hadoop.hive.contrib.udf.example;
 
+import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDF;
 
 /**
  * UDFExampleAdd.
  *
  */
+@Description(name = "example_add", value = "_FUNC_(expr) - Example UDAF that returns the sum")
 public class UDFExampleAdd extends UDF {
 
   public Integer evaluate(Integer... a) {

Modified: hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleArraySum.java
URL: http://svn.apache.org/viewvc/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleArraySum.java?rev=1627140&r1=1627139&r2=1627140&view=diff
==============================================================================
--- hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleArraySum.java (original)
+++ hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleArraySum.java Tue Sep 23 20:00:10 2014
@@ -19,12 +19,14 @@ package org.apache.hadoop.hive.contrib.u
 
 import java.util.List;
 
+import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDF;
 
 /**
  * UDFExampleArraySum.
  *
  */
+@Description(name = "example_arraysum", value = "_FUNC_(expr) - Example UDAF that returns the sum")
 public class UDFExampleArraySum extends UDF {
 
   public Double evaluate(List<Double> a) {

Modified: hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleFormat.java
URL: http://svn.apache.org/viewvc/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleFormat.java?rev=1627140&r1=1627139&r2=1627140&view=diff
==============================================================================
--- hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleFormat.java (original)
+++ hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleFormat.java Tue Sep 23 20:00:10 2014
@@ -17,12 +17,14 @@
  */
 package org.apache.hadoop.hive.contrib.udf.example;
 
+import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDF;
 
 /**
  * UDFExampleFormat.
  *
  */
+@Description(name = "example_format", value = "_FUNC_(expr) - Example UDAF that returns formated String")
 public class UDFExampleFormat extends UDF {
 
   public String evaluate(String format, Object... args) {

Modified: hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleMapConcat.java
URL: http://svn.apache.org/viewvc/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleMapConcat.java?rev=1627140&r1=1627139&r2=1627140&view=diff
==============================================================================
--- hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleMapConcat.java (original)
+++ hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleMapConcat.java Tue Sep 23 20:00:10 2014
@@ -21,12 +21,15 @@ import java.util.ArrayList;
 import java.util.Collections;
 import java.util.Map;
 
+import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDF;
 
 /**
  * UDFExampleMapConcat.
  *
  */
+@Description(name = "example_mapconcat",
+value = "_FUNC_(expr) - Example UDAF that returns contents of Map as a formated String")
 public class UDFExampleMapConcat extends UDF {
 
   public String evaluate(Map<String, String> a) {

Modified: hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleStructPrint.java
URL: http://svn.apache.org/viewvc/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleStructPrint.java?rev=1627140&r1=1627139&r2=1627140&view=diff
==============================================================================
--- hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleStructPrint.java (original)
+++ hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleStructPrint.java Tue Sep 23 20:00:10 2014
@@ -19,12 +19,15 @@ package org.apache.hadoop.hive.contrib.u
 
 import java.util.List;
 
+import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDF;
 
 /**
  * UDFExampleStructPrint.
  *
  */
+@Description(name = "example_structprint",
+value = "_FUNC_(obj) - Example UDAF that returns contents of an object")
 public class UDFExampleStructPrint extends UDF {
 
   public String evaluate(Object a) {

Modified: hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udtf/example/GenericUDTFCount2.java
URL: http://svn.apache.org/viewvc/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udtf/example/GenericUDTFCount2.java?rev=1627140&r1=1627139&r2=1627140&view=diff
==============================================================================
--- hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udtf/example/GenericUDTFCount2.java (original)
+++ hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udtf/example/GenericUDTFCount2.java Tue Sep 23 20:00:10 2014
@@ -20,6 +20,7 @@ package org.apache.hadoop.hive.contrib.u
 
 import java.util.ArrayList;
 
+import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDTF;
@@ -34,6 +35,8 @@ import org.apache.hadoop.hive.serde2.obj
  * to test outputting of rows on close with lateral view.
  *
  */
+@Description(name = "udtfCount2",
+value = "_FUNC_(col) - UDF outputs the number of rows seen, twice.")
 public class GenericUDTFCount2 extends GenericUDTF {
 
   private transient Integer count = Integer.valueOf(0);

Propchange: hive/trunk/hbase-handler/pom.xml
------------------------------------------------------------------------------
  Merged /hive/branches/cbo/hbase-handler/pom.xml:r1605012-1627125

Modified: hive/trunk/itests/src/test/resources/testconfiguration.properties
URL: http://svn.apache.org/viewvc/hive/trunk/itests/src/test/resources/testconfiguration.properties?rev=1627140&r1=1627139&r2=1627140&view=diff
==============================================================================
--- hive/trunk/itests/src/test/resources/testconfiguration.properties (original)
+++ hive/trunk/itests/src/test/resources/testconfiguration.properties Tue Sep 23 20:00:10 2014
@@ -55,6 +55,7 @@ minitez.query.files.shared=alter_merge_2
   bucket2.q,\
   bucket3.q,\
   bucket4.q,\
+  cbo_correctness.q,\
   correlationoptimizer1.q,\
   count.q,\
   create_merge_compressed.q,\
@@ -344,6 +345,7 @@ beeline.positive.exclude=add_part_exist.
 
 minimr.query.negative.files=cluster_tasklog_retrieval.q,\
   file_with_header_footer_negative.q,\
+  local_mapred_error_cache.q,\
   mapreduce_stack_trace.q,\
   mapreduce_stack_trace_hadoop20.q,\
   mapreduce_stack_trace_turnoff.q,\

Modified: hive/trunk/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDAFTestMax.java
URL: http://svn.apache.org/viewvc/hive/trunk/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDAFTestMax.java?rev=1627140&r1=1627139&r2=1627140&view=diff
==============================================================================
--- hive/trunk/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDAFTestMax.java (original)
+++ hive/trunk/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDAFTestMax.java Tue Sep 23 20:00:10 2014
@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.hive.ql.udf;
 
+import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDAF;
 import org.apache.hadoop.hive.ql.exec.UDAFEvaluator;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
@@ -32,6 +33,8 @@ import org.apache.hadoop.io.Text;
  * UDAFTestMax.
  *
  */
+@Description(name = "test_max",
+value = "_FUNC_(col) - UDF to report Max Value")
 public class UDAFTestMax extends UDAF {
 
   /**

Modified: hive/trunk/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDFFileLookup.java
URL: http://svn.apache.org/viewvc/hive/trunk/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDFFileLookup.java?rev=1627140&r1=1627139&r2=1627140&view=diff
==============================================================================
--- hive/trunk/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDFFileLookup.java (original)
+++ hive/trunk/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDFFileLookup.java Tue Sep 23 20:00:10 2014
@@ -27,6 +27,7 @@ import java.util.Map;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
+import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDF;
 import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.Text;
@@ -34,6 +35,8 @@ import org.apache.hadoop.io.Text;
 /**
  * A UDF for testing, which does key/value lookup from a file
  */
+@Description(name = "lookup",
+value = "_FUNC_(col) - UDF for key/value lookup from a file")
 public class UDFFileLookup extends UDF {
   static Log LOG = LogFactory.getLog(UDFFileLookup.class);
 

Modified: hive/trunk/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDFTestErrorOnFalse.java
URL: http://svn.apache.org/viewvc/hive/trunk/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDFTestErrorOnFalse.java?rev=1627140&r1=1627139&r2=1627140&view=diff
==============================================================================
--- hive/trunk/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDFTestErrorOnFalse.java (original)
+++ hive/trunk/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDFTestErrorOnFalse.java Tue Sep 23 20:00:10 2014
@@ -18,11 +18,14 @@
 
 package org.apache.hadoop.hive.ql.udf;
 
+import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDF;
 
 /**
  * A UDF for testing, which throws a RuntimeException if its argument evaluates to false.
  */
+@Description(name = "test_error",
+value = "_FUNC_(col) - UDF throws RuntimeException if  expression evaluates to false")
 public class UDFTestErrorOnFalse extends UDF {
 
   public int evaluate(Boolean b) {

Modified: hive/trunk/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDFTestLength.java
URL: http://svn.apache.org/viewvc/hive/trunk/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDFTestLength.java?rev=1627140&r1=1627139&r2=1627140&view=diff
==============================================================================
--- hive/trunk/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDFTestLength.java (original)
+++ hive/trunk/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDFTestLength.java Tue Sep 23 20:00:10 2014
@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.hive.ql.udf;
 
+import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDF;
 import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.Text;
@@ -25,6 +26,8 @@ import org.apache.hadoop.io.Text;
 /**
  * A UDF for testing, which evaluates the length of a string.
  */
+@Description(name = "testlength",
+value = "_FUNC_(col) - UDF evaluates the length of the string")
 public class UDFTestLength extends UDF {
 
   IntWritable result = new IntWritable();

Modified: hive/trunk/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDFTestLength2.java
URL: http://svn.apache.org/viewvc/hive/trunk/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDFTestLength2.java?rev=1627140&r1=1627139&r2=1627140&view=diff
==============================================================================
--- hive/trunk/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDFTestLength2.java (original)
+++ hive/trunk/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDFTestLength2.java Tue Sep 23 20:00:10 2014
@@ -18,12 +18,15 @@
 
 package org.apache.hadoop.hive.ql.udf;
 
+import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDF;
 
 /**
  * A UDF for testing, which evaluates the length of a string. This UDF uses Java
  * Primitive classes for parameters.
  */
+@Description(name = "testlength2",
+value = "_FUNC_(col) - UDF evaluates the length of the string and returns value as Java Integer")
 public class UDFTestLength2 extends UDF {
 
   public Integer evaluate(String s) {

Modified: hive/trunk/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/DummyContextUDF.java
URL: http://svn.apache.org/viewvc/hive/trunk/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/DummyContextUDF.java?rev=1627140&r1=1627139&r2=1627140&view=diff
==============================================================================
--- hive/trunk/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/DummyContextUDF.java (original)
+++ hive/trunk/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/DummyContextUDF.java Tue Sep 23 20:00:10 2014
@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.hive.ql.udf.generic;
 
+import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.MapredContext;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
@@ -26,7 +27,8 @@ import org.apache.hadoop.hive.serde2.obj
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.mapred.Counters;
 import org.apache.hadoop.mapred.Reporter;
-
+@Description(name = "counter",
+value = "_FUNC_(col) - UDF to report MR counter values")
 public class DummyContextUDF extends GenericUDF {
 
   private MapredContext context;

Modified: hive/trunk/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestGetJavaBoolean.java
URL: http://svn.apache.org/viewvc/hive/trunk/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestGetJavaBoolean.java?rev=1627140&r1=1627139&r2=1627140&view=diff
==============================================================================
--- hive/trunk/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestGetJavaBoolean.java (original)
+++ hive/trunk/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestGetJavaBoolean.java Tue Sep 23 20:00:10 2014
@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.hive.ql.udf.generic;
 
+import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -27,6 +28,8 @@ import org.apache.hadoop.hive.serde2.obj
 /**
  * A test GenericUDF to return native Java's boolean type
  */
+@Description(name = "test_udf_get_java_boolean",
+value = "_FUNC_(str) - GenericUDF to return native Java's boolean type")
 public class GenericUDFTestGetJavaBoolean extends GenericUDF {
   ObjectInspector[] argumentOIs;
 

Modified: hive/trunk/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestGetJavaString.java
URL: http://svn.apache.org/viewvc/hive/trunk/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestGetJavaString.java?rev=1627140&r1=1627139&r2=1627140&view=diff
==============================================================================
--- hive/trunk/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestGetJavaString.java (original)
+++ hive/trunk/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestGetJavaString.java Tue Sep 23 20:00:10 2014
@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.hive.ql.udf.generic;
 
+import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -27,6 +28,8 @@ import org.apache.hadoop.hive.serde2.obj
 /**
  * A test GenericUDF to return native Java's string type
  */
+@Description(name = "test_udf_get_java_string",
+value = "_FUNC_(str) - GenericUDF to return native Java's string type")
 public class GenericUDFTestGetJavaString extends GenericUDF {
   ObjectInspector[] argumentOIs;
 

Modified: hive/trunk/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestTranslate.java
URL: http://svn.apache.org/viewvc/hive/trunk/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestTranslate.java?rev=1627140&r1=1627139&r2=1627140&view=diff
==============================================================================
--- hive/trunk/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestTranslate.java (original)
+++ hive/trunk/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestTranslate.java Tue Sep 23 20:00:10 2014
@@ -21,6 +21,7 @@ package org.apache.hadoop.hive.ql.udf.ge
 import java.util.HashSet;
 import java.util.Set;
 
+import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
@@ -34,6 +35,8 @@ import org.apache.hadoop.io.Text;
 /**
  * Mimics oracle's function translate(str1, str2, str3).
  */
+@Description(name = "test_translate",
+value = "_FUNC_(str1, str2, str3) - Mimics oracle's function translate(str1, str2, str3)")
 public class GenericUDFTestTranslate extends GenericUDF {
   private transient ObjectInspector[] argumentOIs;
 

Modified: hive/trunk/ql/pom.xml
URL: http://svn.apache.org/viewvc/hive/trunk/ql/pom.xml?rev=1627140&r1=1627139&r2=1627140&view=diff
==============================================================================
--- hive/trunk/ql/pom.xml (original)
+++ hive/trunk/ql/pom.xml Tue Sep 23 20:00:10 2014
@@ -28,6 +28,7 @@
   <name>Hive Query Language</name>
 
   <properties>
+    <optiq.version>0.9.1-incubating-SNAPSHOT</optiq.version>
     <hive.path.to.root>..</hive.path.to.root>
   </properties>
 
@@ -182,6 +183,42 @@
       <version>${datanucleus-core.version}</version>
     </dependency>
     <dependency>
+      <groupId>org.apache.optiq</groupId>
+      <artifactId>optiq-core</artifactId>
+      <version>${optiq.version}</version>
+      <exclusions>
+        <!-- hsqldb interferes with the use of derby as the default db
+          in hive's use of datanucleus. 
+        -->
+        <exclusion>
+          <groupId>org.hsqldb</groupId>
+          <artifactId>hsqldb</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>com.fasterxml.jackson.core</groupId>
+          <artifactId>jackson-databind</artifactId>
+        </exclusion>
+      </exclusions>
+    </dependency>   
+    <dependency>
+      <groupId>org.apache.optiq</groupId>
+      <artifactId>optiq-avatica</artifactId>
+      <version>${optiq.version}</version>
+      <exclusions>
+        <!-- hsqldb interferes with the use of derby as the default db
+          in hive's use of datanucleus. 
+        -->
+        <exclusion>
+          <groupId>org.hsqldb</groupId>
+          <artifactId>hsqldb</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>com.fasterxml.jackson.core</groupId>
+          <artifactId>jackson-databind</artifactId>
+        </exclusion>
+      </exclusions>
+    </dependency>
+    <dependency>
       <groupId>com.google.guava</groupId>
       <artifactId>guava</artifactId>
       <version>${guava.version}</version>

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/QueryProperties.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/QueryProperties.java?rev=1627140&r1=1627139&r2=1627140&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/QueryProperties.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/QueryProperties.java Tue Sep 23 20:00:10 2014
@@ -48,12 +48,37 @@ public class QueryProperties {
   boolean mapJoinRemoved = false;
   boolean hasMapGroupBy = false;
 
+  private int noOfJoins = 0;
+  private int noOfOuterJoins = 0;
+  private boolean hasLateralViews;
+  
+  private boolean multiDestQuery;
+  private boolean filterWithSubQuery;
+  
   public boolean hasJoin() {
-    return hasJoin;
+    return (noOfJoins > 0);
   }
 
-  public void setHasJoin(boolean hasJoin) {
-    this.hasJoin = hasJoin;
+  public void incrementJoinCount(boolean outerJoin) {
+    noOfJoins++;
+    if (outerJoin)
+      noOfOuterJoins++;
+  }
+
+  public int getJoinCount() {
+    return noOfJoins;
+  }
+
+  public int getOuterJoinCount() {
+    return noOfOuterJoins;
+  }
+
+  public void setHasLateralViews(boolean hasLateralViews) {
+    this.hasLateralViews = hasLateralViews;
+  }
+
+  public boolean hasLateralViews() {
+    return hasLateralViews;
   }
 
   public boolean hasGroupBy() {
@@ -144,6 +169,22 @@ public class QueryProperties {
     this.hasMapGroupBy = hasMapGroupBy;
   }
 
+  public boolean hasMultiDestQuery() {
+    return this.multiDestQuery;
+  }
+
+  public void setMultiDestQuery(boolean multiDestQuery) {
+    this.multiDestQuery = multiDestQuery;
+  }
+
+  public void setFilterWithSubQuery(boolean filterWithSubQuery) {
+    this.filterWithSubQuery = filterWithSubQuery;
+  }
+
+  public boolean hasFilterWithSubQuery() {
+    return this.filterWithSubQuery;
+  }
+
   public void clear() {
     hasJoin = false;
     hasGroupBy = false;
@@ -160,5 +201,11 @@ public class QueryProperties {
     hasClusterBy = false;
     mapJoinRemoved = false;
     hasMapGroupBy = false;
+
+    noOfJoins = 0;
+    noOfOuterJoins = 0;
+    
+    multiDestQuery = false;
+    filterWithSubQuery = false;
   }
 }

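QueryProperties replaces the hasJoin flag and its setter with join counters, so analyzers record every join (and whether it is outer) rather than setting a boolean once. A small usage sketch against the new API, with illustrative values:

    import org.apache.hadoop.hive.ql.QueryProperties;

    public class JoinCountSketch {
      public static void main(String[] args) {
        QueryProperties qp = new QueryProperties();
        qp.incrementJoinCount(false);  // an inner join
        qp.incrementJoinCount(true);   // an outer join bumps both counters
        // hasJoin() is now derived from the count instead of a stored flag.
        System.out.println(qp.hasJoin());            // true
        System.out.println(qp.getJoinCount());       // 2
        System.out.println(qp.getOuterJoinCount());  // 1
      }
    }
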
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java?rev=1627140&r1=1627139&r2=1627140&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java Tue Sep 23 20:00:10 2014
@@ -639,6 +639,14 @@ public final class FunctionRegistry {
     }
   }
 
+  public static String getNormalizedFunctionName(String fn) {
+    // Does the same normalization as getFunctionInfo, but without looking up the function info.
+    fn = fn.toLowerCase();
+    return (FunctionUtils.isQualifiedFunctionName(fn) || mFunctions.get(fn) != null) ? fn
+        : FunctionUtils.qualifyFunctionName(
+            fn, SessionState.get().getCurrentDatabase().toLowerCase());
+  }
+
   private static <T extends CommonFunctionInfo> T getFunctionInfo(
       Map<String, T> mFunctions, String functionName) {
     functionName = functionName.toLowerCase();

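getNormalizedFunctionName() lower-cases a function name and, unless the name is already db-qualified or registered as a built-in, qualifies it with the session's current database. A hedged re-statement of that logic as standalone code; the real method consults mFunctions and SessionState, for which builtIns and currentDb stand in here:

    import java.util.Arrays;
    import java.util.HashSet;
    import java.util.Set;

    public class NormalizeSketch {
      static String normalize(String fn, Set<String> builtIns, String currentDb) {
        fn = fn.toLowerCase();
        // A dot means the name is already qualified (cf. FunctionUtils.isQualifiedFunctionName).
        return (fn.contains(".") || builtIns.contains(fn))
            ? fn : currentDb.toLowerCase() + "." + fn;
      }

      public static void main(String[] args) {
        Set<String> builtIns = new HashSet<String>(Arrays.asList("concat", "substr"));
        System.out.println(normalize("CONCAT", builtIns, "default"));   // concat
        System.out.println(normalize("myFn", builtIns, "default"));     // default.myfn
        System.out.println(normalize("db1.myFn", builtIns, "default")); // db1.myfn
      }
    }
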
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionPruner.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionPruner.java?rev=1627140&r1=1627139&r2=1627140&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionPruner.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionPruner.java Tue Sep 23 20:00:10 2014
@@ -57,6 +57,7 @@ import org.apache.hadoop.hive.ql.udf.gen
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPAnd;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPOr;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 
 /**
  * The transformation step that does partition pruning.
@@ -155,27 +156,85 @@ public class PartitionPruner implements 
    *         pruner condition.
    * @throws HiveException
    */
-  private static PrunedPartitionList prune(Table tab, ExprNodeDesc prunerExpr,
+  public static PrunedPartitionList prune(Table tab, ExprNodeDesc prunerExpr,
       HiveConf conf, String alias, Map<String, PrunedPartitionList> prunedPartitionsMap)
           throws SemanticException {
+
     LOG.trace("Started pruning partiton");
     LOG.trace("dbname = " + tab.getDbName());
     LOG.trace("tabname = " + tab.getTableName());
-    LOG.trace("prune Expression = " + prunerExpr);
+    LOG.trace("prune Expression = " + prunerExpr == null ? "" : prunerExpr);
 
     String key = tab.getDbName() + "." + tab.getTableName() + ";";
 
-    if (prunerExpr != null) {
-      key = key + prunerExpr.getExprString();
+    if (!tab.isPartitioned()) {
+      // If the table is not partitioned, return everything.
+      return getAllPartsFromCacheOrServer(tab, key, false, prunedPartitionsMap);
+    }
+
+    if ("strict".equalsIgnoreCase(HiveConf.getVar(conf, HiveConf.ConfVars.HIVEMAPREDMODE))
+        && !hasColumnExpr(prunerExpr)) {
+      // If the "strict" mode is on, we have to provide partition pruner for each table.
+      throw new SemanticException(ErrorMsg.NO_PARTITION_PREDICATE
+          .getMsg("for Alias \"" + alias + "\" Table \"" + tab.getTableName() + "\""));
+    }
+
+    if (prunerExpr == null) {
+      // In non-strict mode with no predicates at all - get everything.
+      return getAllPartsFromCacheOrServer(tab, key, false, prunedPartitionsMap);
+    }
+
+    Set<String> partColsUsedInFilter = new LinkedHashSet<String>();
+    // Replace virtual columns with nulls. See javadoc for details.
+    prunerExpr = removeNonPartCols(prunerExpr, extractPartColNames(tab), partColsUsedInFilter);
+    // Remove all parts that are not partition columns. See javadoc for details.
+    ExprNodeGenericFuncDesc compactExpr = (ExprNodeGenericFuncDesc)compactExpr(prunerExpr.clone());
+    String oldFilter = prunerExpr.getExprString();
+    if (compactExpr == null) {
+      // Non-strict mode, and all the predicates are on non-partition columns - get everything.
+      LOG.debug("Filter " + oldFilter + " was null after compacting");
+      return getAllPartsFromCacheOrServer(tab, key, true, prunedPartitionsMap);
+    }
+    LOG.debug("Filter w/ compacting: " + compactExpr.getExprString()
+        + "; filter w/o compacting: " + oldFilter);
+
+    key = key + compactExpr.getExprString();
+    PrunedPartitionList ppList = prunedPartitionsMap.get(key);
+    if (ppList != null) {
+      return ppList;
+    }
+
+    ppList = getPartitionsFromServer(tab, compactExpr, conf, alias, partColsUsedInFilter, oldFilter.equals(compactExpr.getExprString()));
+    prunedPartitionsMap.put(key, ppList);
+    return ppList;
+  }
+
+  private static PrunedPartitionList getAllPartsFromCacheOrServer(Table tab, String key, boolean unknownPartitions,
+    Map<String, PrunedPartitionList> partsCache)  throws SemanticException {
+    PrunedPartitionList ppList = partsCache.get(key);
+    if (ppList != null) {
+      return ppList;
     }
-    PrunedPartitionList ret = prunedPartitionsMap.get(key);
-    if (ret != null) {
-      return ret;
+    Set<Partition> parts;
+    try {
+      parts = getAllPartitions(tab);
+    } catch (HiveException e) {
+      throw new SemanticException(e);
     }
+    ppList = new PrunedPartitionList(tab, parts, null, unknownPartitions);
+    partsCache.put(key, ppList);
+    return ppList;
+  }
 
-    ret = getPartitionsFromServer(tab, prunerExpr, conf, alias);
-    prunedPartitionsMap.put(key, ret);
-    return ret;
+  private static ExprNodeDesc removeTruePredicates(ExprNodeDesc e) {
+    if (e instanceof ExprNodeConstantDesc) {
+      ExprNodeConstantDesc eC = (ExprNodeConstantDesc) e;
+      if (e.getTypeInfo() == TypeInfoFactory.booleanTypeInfo
+          && eC.getValue() == Boolean.TRUE) {
+        return null;
+      }
+    }
+    return e;
   }
 
   /**
@@ -187,7 +246,8 @@ public class PartitionPruner implements 
    */
   static private ExprNodeDesc compactExpr(ExprNodeDesc expr) {
     if (expr instanceof ExprNodeConstantDesc) {
-      if (((ExprNodeConstantDesc)expr).getValue() == null) {
+      expr = removeTruePredicates(expr);
+      if (expr == null || ((ExprNodeConstantDesc)expr).getValue() == null) {
         return null;
       } else {
         throw new IllegalStateException("Unexpected non-null ExprNodeConstantDesc: "
@@ -198,10 +258,11 @@ public class PartitionPruner implements 
       boolean isAnd = udf instanceof GenericUDFOPAnd;
       if (isAnd || udf instanceof GenericUDFOPOr) {
         List<ExprNodeDesc> children = expr.getChildren();
-        ExprNodeDesc left = children.get(0);
-        children.set(0, compactExpr(left));
-        ExprNodeDesc right = children.get(1);
-        children.set(1, compactExpr(right));
+        ExprNodeDesc left = removeTruePredicates(children.get(0));
+        children.set(0, left == null ? null : compactExpr(left));
+        ExprNodeDesc right = removeTruePredicates(children.get(1));
+        children.set(1, right == null ? null : compactExpr(right));
+
         // Note that one does not simply compact (not-null or null) to not-null.
         // Only if we have an "and" is it valid to send one side to metastore.
         if (children.get(0) == null && children.get(1) == null) {
@@ -267,40 +328,8 @@ public class PartitionPruner implements 
   }
 
   private static PrunedPartitionList getPartitionsFromServer(Table tab,
-      ExprNodeDesc prunerExpr, HiveConf conf, String alias) throws SemanticException {
+      final ExprNodeGenericFuncDesc compactExpr, HiveConf conf, String alias, Set<String> partColsUsedInFilter, boolean isPruningByExactFilter) throws SemanticException {
     try {
-      if (!tab.isPartitioned()) {
-        // If the table is not partitioned, return everything.
-        return new PrunedPartitionList(tab, getAllPartitions(tab), null, false);
-      }
-      LOG.debug("tabname = " + tab.getTableName() + " is partitioned");
-
-      if ("strict".equalsIgnoreCase(HiveConf.getVar(conf, HiveConf.ConfVars.HIVEMAPREDMODE))
-          && !hasColumnExpr(prunerExpr)) {
-        // If the "strict" mode is on, we have to provide partition pruner for each table.
-        throw new SemanticException(ErrorMsg.NO_PARTITION_PREDICATE
-            .getMsg("for Alias \"" + alias + "\" Table \"" + tab.getTableName() + "\""));
-      }
-
-      if (prunerExpr == null) {
-        // Non-strict mode, and there is no predicates at all - get everything.
-        return new PrunedPartitionList(tab, getAllPartitions(tab), null, false);
-      }
-
-      Set<String> referred = new LinkedHashSet<String>();
-      // Replace virtual columns with nulls. See javadoc for details.
-      prunerExpr = removeNonPartCols(prunerExpr, extractPartColNames(tab), referred);
-      // Remove all parts that are not partition columns. See javadoc for details.
-      ExprNodeGenericFuncDesc compactExpr = (ExprNodeGenericFuncDesc)compactExpr(prunerExpr.clone());
-      String oldFilter = prunerExpr.getExprString();
-      if (compactExpr == null) {
-        // Non-strict mode, and all the predicates are on non-partition columns - get everything.
-        LOG.debug("Filter " + oldFilter + " was null after compacting");
-        return new PrunedPartitionList(tab, getAllPartitions(tab), null, true);
-      }
-
-      LOG.debug("Filter w/ compacting: " + compactExpr.getExprString()
-        + "; filter w/o compacting: " + oldFilter);
 
       // Finally, check the filter for non-built-in UDFs. If these are present, we cannot
       // do filtering on the server, and have to fall back to client path.
@@ -330,9 +359,8 @@ public class PartitionPruner implements 
       // The partitions are "unknown" if the call says so due to the expression
       // evaluator returning null for a partition, or if we sent a partial expression to
       // metastore and so some partitions may have no data based on other filters.
-      boolean isPruningByExactFilter = oldFilter.equals(compactExpr.getExprString());
       return new PrunedPartitionList(tab, new LinkedHashSet<Partition>(partitions),
-          new ArrayList<String>(referred),
+          new ArrayList<String>(partColsUsedInFilter),
           hasUnknownPartitions || !isPruningByExactFilter);
     } catch (SemanticException e) {
       throw e;

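The pruning fast paths (non-partitioned table, strict-mode check, missing predicate) moved from getPartitionsFromServer() into prune() itself, and compactExpr() now folds away constant TRUE predicates via removeTruePredicates(), so a tautological filter falls back to fetching all partitions. A hedged sketch of the kind of expression node that triggers the new path; removeTruePredicates() is private, so this only constructs its input:

    import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
    import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
    import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

    public class TruePredicateSketch {
      public static void main(String[] args) {
        // A constant TRUE of boolean type, e.g. what constant folding can
        // leave behind for a predicate like "ds = ds".
        ExprNodeDesc alwaysTrue =
            new ExprNodeConstantDesc(TypeInfoFactory.booleanTypeInfo, Boolean.TRUE);
        // Per the hunk above, removeTruePredicates() maps such a node to null,
        // and compactExpr() then treats it like a dropped predicate.
        System.out.println(alwaysTrue.getExprString());
      }
    }
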
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java?rev=1627140&r1=1627139&r2=1627140&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java Tue Sep 23 20:00:10 2014
@@ -207,7 +207,7 @@ public abstract class BaseSemanticAnalyz
   }
 
   public abstract void analyzeInternal(ASTNode ast) throws SemanticException;
-  public void init() {
+  public void init(boolean clearPartsCache) {
     //no-op
   }
 
@@ -217,7 +217,7 @@ public abstract class BaseSemanticAnalyz
 
   public void analyze(ASTNode ast, Context ctx) throws SemanticException {
     initCtx(ctx);
-    init();
+    init(true);
     analyzeInternal(ast);
   }
 
@@ -244,7 +244,7 @@ public abstract class BaseSemanticAnalyz
     this.fetchTask = fetchTask;
   }
 
-  protected void reset() {
+  protected void reset(boolean clearPartsCache) {
     rootTasks = new ArrayList<Task<? extends Serializable>>();
   }
 

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsSemanticAnalyzer.java?rev=1627140&r1=1627139&r2=1627140&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsSemanticAnalyzer.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsSemanticAnalyzer.java Tue Sep 23 20:00:10 2014
@@ -58,7 +58,7 @@ public class ColumnStatsSemanticAnalyzer
   private Table tbl;
 
   public ColumnStatsSemanticAnalyzer(HiveConf conf) throws SemanticException {
-    super(conf);
+    super(conf, false);
   }
 
   private boolean shouldRewrite(ASTNode tree) {
@@ -377,7 +377,7 @@ public class ColumnStatsSemanticAnalyzer
     QBParseInfo qbp;
 
     // initialize QB
-    init();
+    init(true);
 
     // check if it is no scan. grammar prevents coexit noscan/columns
     super.processNoScanCommand(ast);

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseDriver.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseDriver.java?rev=1627140&r1=1627139&r2=1627140&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseDriver.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseDriver.java Tue Sep 23 20:00:10 2014
@@ -131,7 +131,7 @@ public class ParseDriver {
    * so that the graph walking algorithms and the rules framework defined in
    * ql.lib can be used with the AST Nodes.
    */
-  static final TreeAdaptor adaptor = new CommonTreeAdaptor() {
+  public static final TreeAdaptor adaptor = new CommonTreeAdaptor() {
     /**
      * Creates an ASTNode for the given token. The ASTNode is a wrapper around
      * antlr's CommonTree class that implements the Node interface.

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseUtils.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseUtils.java?rev=1627140&r1=1627139&r2=1627140&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseUtils.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseUtils.java Tue Sep 23 20:00:10 2014
@@ -111,7 +111,7 @@ public final class ParseUtils {
    * @param tableFieldTypeInfo TypeInfo to convert to
    * @return Expression converting column to the type specified by tableFieldTypeInfo
    */
-  static ExprNodeDesc createConversionCast(ExprNodeDesc column, PrimitiveTypeInfo tableFieldTypeInfo)
+  public static ExprNodeDesc createConversionCast(ExprNodeDesc column, PrimitiveTypeInfo tableFieldTypeInfo)
       throws SemanticException {
     // Get base type, since type string may be parameterized
     String baseType = TypeInfoUtils.getBaseName(tableFieldTypeInfo.getTypeName());

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/RowResolver.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/RowResolver.java?rev=1627140&r1=1627139&r2=1627140&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/RowResolver.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/RowResolver.java Tue Sep 23 20:00:10 2014
@@ -49,7 +49,7 @@ public class RowResolver implements Seri
    * The primary(first) mapping is still only held in
    * invRslvMap.
    */
-  private Map<String, String[]> altInvRslvMap;
+  private final Map<String, String[]> altInvRslvMap;
   private  Map<String, ASTNode> expressionMap;
 
   // TODO: Refactor this and do in a more object oriented manner
@@ -351,4 +351,73 @@ public class RowResolver implements Seri
     this.expressionMap = expressionMap;
   }
 
+
+  // TODO: 1) How to handle collisions? 2) Should we be cloning ColumnInfo or
+  // not?
+  public static int add(RowResolver rrToAddTo, RowResolver rrToAddFrom,
+      int outputColPos, int numColumns) throws SemanticException {
+    String tabAlias;
+    String colAlias;
+    String[] qualifiedColName;
+    int i = 0;
+
+    for (ColumnInfo cInfoFrmInput : rrToAddFrom.getRowSchema().getSignature()) {
+      if ( numColumns >= 0 && i == numColumns ) {
+        break;
+      }
+      ColumnInfo newCI = null;
+      qualifiedColName = rrToAddFrom.getInvRslvMap().get(
+          cInfoFrmInput.getInternalName());
+      tabAlias = qualifiedColName[0];
+      colAlias = qualifiedColName[1];
+
+      newCI = new ColumnInfo(cInfoFrmInput);
+      newCI.setInternalName(SemanticAnalyzer
+          .getColumnInternalName(outputColPos));
+
+      outputColPos++;
+
+      if (rrToAddTo.get(tabAlias, colAlias) != null) {
+        LOG.debug("Found duplicate column alias in RR: " + rrToAddTo.get(tabAlias, colAlias));
+      } else {
+        rrToAddTo.put(tabAlias, colAlias, newCI);
+      }
+
+      qualifiedColName = rrToAddFrom.getAlternateMappings(cInfoFrmInput
+          .getInternalName());
+      if (qualifiedColName != null) {
+        tabAlias = qualifiedColName[0];
+        colAlias = qualifiedColName[1];
+        rrToAddTo.put(tabAlias, colAlias, newCI);
+      }
+      i++;
+    }
+
+    return outputColPos;
+	}
+
+  public static int add(RowResolver rrToAddTo, RowResolver rrToAddFrom,
+      int outputColPos) throws SemanticException {
+    return add(rrToAddTo, rrToAddFrom, outputColPos, -1);
+  }
+
+  /**
+   * Return a new row resolver that is the combination of the left RR and the
+   * right RR: the schema is the schema of left followed by the schema of right.
+   *
+   * @param leftRR
+   * @param rightRR
+   * @return
+   * @throws SemanticException
+   */
+  public static RowResolver getCombinedRR(RowResolver leftRR,
+      RowResolver rightRR) throws SemanticException {
+    int outputColPos = 0;
+
+    RowResolver combinedRR = new RowResolver();
+    outputColPos = add(combinedRR, leftRR, outputColPos);
+    outputColPos = add(combinedRR, rightRR, outputColPos);
+
+    return combinedRR;
+  }
 }
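
The new static add()/getCombinedRR() helpers let callers merge resolvers without hand-rolling the column renumbering; getCombinedRR() appends right's schema after left's. A minimal sketch under assumed table aliases and columns (everything here besides the RowResolver and ColumnInfo APIs is illustrative):

    import org.apache.hadoop.hive.ql.exec.ColumnInfo;
    import org.apache.hadoop.hive.ql.parse.RowResolver;
    import org.apache.hadoop.hive.ql.parse.SemanticException;
    import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

    public class CombinedRRSketch {
      public static void main(String[] args) throws SemanticException {
        RowResolver left = new RowResolver();
        left.put("t1", "key",
            new ColumnInfo("_col0", TypeInfoFactory.stringTypeInfo, "t1", false));
        RowResolver right = new RowResolver();
        right.put("t2", "value",
            new ColumnInfo("_col0", TypeInfoFactory.stringTypeInfo, "t2", false));
        // The combined schema is left's columns then right's; internal names
        // are renumbered via SemanticAnalyzer.getColumnInternalName().
        RowResolver joined = RowResolver.getCombinedRR(left, right);
        System.out.println(joined.getRowSchema().getSignature().size()); // 2
      }
    }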